Example no. 1
class RHeader(Reference):
    msg_id = Unicode()
    msg_type = Unicode()
    session = Unicode()
    username = Unicode()
class InteractiveShellApp(Configurable):
    """A Mixin for applications that start InteractiveShell instances.
    
    Provides configurables for loading extensions and executing files
    as part of configuring a Shell environment.
    
    Provides init_extensions() and init_code() methods, to be called
    after init_shell(), which must be implemented by subclasses.
    """
    extensions = List(
        Unicode,
        config=True,
        help="A list of dotted module names of IPython extensions to load.")
    extra_extension = Unicode(
        '',
        config=True,
        help="dotted module name of an IPython extension to load.")

    def _extra_extension_changed(self, name, old, new):
        if new:
            # add to self.extensions
            self.extensions.append(new)

    # Extensions that are always loaded (not configurable)
    default_extensions = List(Unicode, [u'storemagic'], config=False)

    exec_files = List(Unicode,
                      config=True,
                      help="""List of files to run at IPython startup.""")
    file_to_run = Unicode('', config=True, help="""A file to be run""")

    exec_lines = List(Unicode,
                      config=True,
                      help="""lines of code to run at IPython startup.""")
    code_to_run = Unicode('',
                          config=True,
                          help="Execute the given command string.")
    pylab_import_all = Bool(
        True,
        config=True,
        help="""If true, an 'import *' is done from numpy and pylab,
        when using pylab""")
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')

    def init_shell(self):
        raise NotImplementedError("Override in subclasses")

    def init_extensions(self):
        """Load all IPython extensions in IPythonApp.extensions.

        This uses :meth:`ExtensionManager.load_extension` to load each
        extension listed in ``self.extensions``.
        """
        try:
            self.log.debug("Loading IPython extensions...")
            extensions = self.default_extensions + self.extensions
            for ext in extensions:
                try:
                    self.log.info("Loading IPython extension: %s" % ext)
                    self.shell.extension_manager.load_extension(ext)
                except:
                    self.log.warn("Error in loading extension: %s" % ext +
                                  "\nCheck your config files in %s" %
                                  self.profile_dir.location)
                    self.shell.showtraceback()
        except:
            self.log.warn("Unknown error in loading extensions:")
            self.shell.showtraceback()

    def init_code(self):
        """run the pre-flight code, specified via exec_lines"""
        self._run_startup_files()
        self._run_exec_lines()
        self._run_exec_files()
        self._run_cmd_line_code()

        # Hide variables defined here from %who etc.
        self.shell.user_ns_hidden.update(self.shell.user_ns)

    def _run_exec_lines(self):
        """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
        if not self.exec_lines:
            return
        try:
            self.log.debug("Running code from IPythonApp.exec_lines...")
            for line in self.exec_lines:
                try:
                    self.log.info("Running code in user namespace: %s" % line)
                    self.shell.run_cell(line, store_history=False)
                except:
                    self.log.warn("Error in executing line in user "
                                  "namespace: %s" % line)
                    self.shell.showtraceback()
        except:
            self.log.warn("Unknown error in handling IPythonApp.exec_lines:")
            self.shell.showtraceback()

    def _exec_file(self, fname):
        try:
            full_filename = filefind(fname, [u'.', self.ipython_dir])
        except IOError as e:
            self.log.warn("File not found: %r" % fname)
            return
        # Make sure that the running script gets a proper sys.argv as if it
        # were run from a system shell.
        save_argv = sys.argv
        sys.argv = [full_filename] + self.extra_args[1:]
        # protect sys.argv from potential unicode strings on Python 2:
        if not py3compat.PY3:
            sys.argv = [py3compat.cast_bytes(a) for a in sys.argv]
        try:
            if os.path.isfile(full_filename):
                if full_filename.endswith('.ipy'):
                    self.log.info("Running file in user namespace: %s" %
                                  full_filename)
                    self.shell.safe_execfile_ipy(full_filename)
                else:
                    # default to python, even without extension
                    self.log.info("Running file in user namespace: %s" %
                                  full_filename)
                    # Ensure that __file__ is always defined to match Python behavior
                    self.shell.user_ns['__file__'] = fname
                    try:
                        self.shell.safe_execfile(full_filename,
                                                 self.shell.user_ns)
                    finally:
                        del self.shell.user_ns['__file__']
        finally:
            sys.argv = save_argv

    def _run_startup_files(self):
        """Run files from profile startup directory"""
        startup_dir = self.profile_dir.startup_dir
        startup_files = glob.glob(os.path.join(startup_dir, '*.py'))
        startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
        if not startup_files:
            return

        self.log.debug("Running startup files from %s...", startup_dir)
        try:
            for fname in sorted(startup_files):
                self._exec_file(fname)
        except:
            self.log.warn("Unknown error in handling startup files:")
            self.shell.showtraceback()

    def _run_exec_files(self):
        """Run files from IPythonApp.exec_files"""
        if not self.exec_files:
            return

        self.log.debug("Running files in IPythonApp.exec_files...")
        try:
            for fname in self.exec_files:
                self._exec_file(fname)
        except:
            self.log.warn("Unknown error in handling IPythonApp.exec_files:")
            self.shell.showtraceback()

    def _run_cmd_line_code(self):
        """Run code or file specified at the command-line"""
        if self.code_to_run:
            line = self.code_to_run
            try:
                self.log.info("Running code given at command line (c=): %s" %
                              line)
                self.shell.run_cell(line, store_history=False)
            except:
                self.log.warn("Error in executing line in user namespace: %s" %
                              line)
                self.shell.showtraceback()

        # Like Python itself, ignore the second if the first of these is present
        elif self.file_to_run:
            fname = self.file_to_run
            try:
                self._exec_file(fname)
            except:
                self.log.warn("Error in executing file in user namespace: %s" %
                              fname)
                self.shell.showtraceback()
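
These traits are normally driven from an IPython profile configuration file rather than set in code. A minimal sketch of such a file follows; the extension and file names are illustrative, not taken from the class above.

# ipython_config.py -- hypothetical profile configuration
c = get_config()
c.InteractiveShellApp.extensions = ['autoreload']          # loaded after the always-on default_extensions
c.InteractiveShellApp.exec_lines = ['import numpy as np']
c.InteractiveShellApp.exec_files = ['startup_helpers.py']  # resolved relative to '.' or ipython_dir
c.InteractiveShellApp.pylab_import_all = False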
Example no. 3
class Session(Configurable):
    """Object for handling serialization and sending of messages.

    The Session object handles building messages and sending them
    with ZMQ sockets or ZMQStream objects.  Objects can communicate with each
    other over the network via Session objects, and only need to work with the
    dict-based IPython message spec. The Session will handle
    serialization/deserialization, security, and metadata.

    Sessions support configurable serialization via packer/unpacker traits,
    and signing with HMAC digests via the key/keyfile traits.

    Parameters
    ----------

    debug : bool
        whether to trigger extra debugging statements
    packer/unpacker : str : 'json', 'pickle' or import_string
        importstrings for methods to serialize message parts.  If just
        'json' or 'pickle', predefined JSON and pickle packers will be used.
        Otherwise, the entire importstring must be used.

        The functions must accept at least valid JSON input, and output *bytes*.

        For example, to use msgpack:
        packer = 'msgpack.packb', unpacker='msgpack.unpackb'
    pack/unpack : callables
        You can also set the pack/unpack callables for serialization directly.
    session : bytes
        the ID of this Session object.  The default is to generate a new UUID.
    username : unicode
        username added to message headers.  The default is to ask the OS.
    key : bytes
        The key used to initialize an HMAC signature.  If unset, messages
        will not be signed or checked.
    keyfile : filepath
        The file containing a key.  If this is set, `key` will be initialized
        to the contents of the file.

    """

    debug = Bool(False, config=True, help="""Debug output in the Session""")

    packer = DottedObjectName(
        'json',
        config=True,
        help="""The name of the packer for serializing messages.
            Should be one of 'json', 'pickle', or an import name
            for a custom callable serializer.""")

    def _packer_changed(self, name, old, new):
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.unpacker = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.unpacker = new
        else:
            self.pack = import_item(str(new))

    unpacker = DottedObjectName(
        'json',
        config=True,
        help="""The name of the unpacker for unserializing messages.
        Only used with custom functions for `packer`.""")

    def _unpacker_changed(self, name, old, new):
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.packer = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.packer = new
        else:
            self.unpack = import_item(str(new))

    session = CUnicode(u'',
                       config=True,
                       help="""The UUID identifying this session.""")

    def _session_default(self):
        u = unicode_type(uuid.uuid4())
        self.bsession = u.encode('ascii')
        return u

    def _session_changed(self, name, old, new):
        self.bsession = self.session.encode('ascii')

    # bsession is the session as bytes
    bsession = CBytes(b'')

    username = Unicode(
        str_to_unicode(os.environ.get('USER', 'username')),
        help="""Username for the Session. Default is your system username.""",
        config=True)

    metadata = Dict(
        {},
        config=True,
        help=
        """Metadata dictionary, which serves as the default top-level metadata dict for each message."""
    )

    # if 0, no adapting to do.
    adapt_version = Integer(0)

    # message signature related traits:

    key = CBytes(b'',
                 config=True,
                 help="""execution key, for extra authentication.""")

    def _key_changed(self, name, old, new):
        if new:
            self.auth = hmac.HMAC(new, digestmod=self.digest_mod)
        else:
            self.auth = None

    signature_scheme = Unicode(
        'hmac-sha256',
        config=True,
        help="""The digest scheme used to construct the message signatures.
        Must have the form 'hmac-HASH'.""")

    def _signature_scheme_changed(self, name, old, new):
        if not new.startswith('hmac-'):
            raise TraitError(
                "signature_scheme must start with 'hmac-', got %r" % new)
        hash_name = new.split('-', 1)[1]
        try:
            self.digest_mod = getattr(hashlib, hash_name)
        except AttributeError:
            raise TraitError("hashlib has no such attribute: %s" % hash_name)

    digest_mod = Any()

    def _digest_mod_default(self):
        return hashlib.sha256

    auth = Instance(hmac.HMAC)

    digest_history = Set()
    digest_history_size = Integer(
        2**16,
        config=True,
        help="""The maximum number of digests to remember.
        
        The digest history will be culled when it exceeds this value.
        """)

    keyfile = Unicode('',
                      config=True,
                      help="""path to file containing execution key.""")

    def _keyfile_changed(self, name, old, new):
        with open(new, 'rb') as f:
            self.key = f.read().strip()

    # for protecting against sends from forks
    pid = Integer()

    # serialization traits:

    pack = Any(default_packer)  # the actual packer function

    def _pack_changed(self, name, old, new):
        if not callable(new):
            raise TypeError("packer must be callable, not %s" % type(new))

    unpack = Any(default_unpacker)  # the actual unpacker function

    def _unpack_changed(self, name, old, new):
        # only callability is checked here; _check_packers verifies that unpack inverts pack
        if not callable(new):
            raise TypeError("unpacker must be callable, not %s" % type(new))

    # thresholds:
    copy_threshold = Integer(
        2**16,
        config=True,
        help=
        "Threshold (in bytes) beyond which a buffer should be sent without copying."
    )
    buffer_threshold = Integer(
        MAX_BYTES,
        config=True,
        help=
        "Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling."
    )
    item_threshold = Integer(
        MAX_ITEMS,
        config=True,
        help=
        """The maximum number of items for a container to be introspected for custom serialization.
        Containers larger than this are pickled outright.
        """)

    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts.  If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object.  The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers.  The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature.  If unset, messages
            will not be signed or checked.
        signature_scheme : str
            The message digest scheme. Currently must be of the form 'hmac-HASH',
            where 'HASH' is a hashing function available in Python's hashlib.
            The default is 'hmac-sha256'.
            This is ignored if 'key' is empty.
        keyfile : filepath
            The file containing a key.  If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        self.none = self.pack({})
        # ensure self._session_default() if necessary, so bsession is defined:
        self.session
        self.pid = os.getpid()

    @property
    def msg_id(self):
        """always return new uuid"""
        return str(uuid.uuid4())

    def _check_packers(self):
        """check packers for datetime support."""
        pack = self.pack
        unpack = self.unpack

        # check simple serialization
        msg = dict(a=[1, 'hi'])
        try:
            packed = pack(msg)
        except Exception as e:
            msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg))

        # ensure packed message is bytes
        if not isinstance(packed, bytes):
            raise ValueError("message packed to %r, but bytes are required" %
                             type(packed))

        # check that unpack is pack's inverse
        try:
            unpacked = unpack(packed)
            assert unpacked == msg
        except Exception as e:
            msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer,
                           unpacker=self.unpacker,
                           e=e,
                           jsonmsg=jsonmsg))

        # check datetime support
        msg = dict(t=datetime.now())
        try:
            unpacked = unpack(pack(msg))
            if isinstance(unpacked['t'], datetime):
                raise ValueError("Shouldn't deserialize to datetime")
        except Exception:
            self.pack = lambda o: pack(squash_dates(o))
            self.unpack = lambda s: unpack(s)

    def msg_header(self, msg_type):
        return msg_header(self.msg_id, msg_type, self.username, self.session)

    def msg(self,
            msg_type,
            content=None,
            parent=None,
            header=None,
            metadata=None):
        """Return the nested message dict.

        This format is different from what is sent over the wire. The
        serialize/unserialize methods convert this nested message dict to the wire
        format, which is a list of message parts.
        """
        msg = {}
        header = self.msg_header(msg_type) if header is None else header
        msg['header'] = header
        msg['msg_id'] = header['msg_id']
        msg['msg_type'] = header['msg_type']
        msg['parent_header'] = {} if parent is None else extract_header(parent)
        msg['content'] = {} if content is None else content
        msg['metadata'] = self.metadata.copy()
        if metadata is not None:
            msg['metadata'].update(metadata)
        return msg

    def sign(self, msg_list):
        """Sign a message with HMAC digest. If no auth, return b''.

        Parameters
        ----------
        msg_list : list
            The [p_header,p_parent,p_content] part of the message list.
        """
        if self.auth is None:
            return b''
        h = self.auth.copy()
        for m in msg_list:
            h.update(m)
        return str_to_bytes(h.hexdigest())

    def serialize(self, msg, ident=None):
        """Serialize the message components to bytes.

        This is roughly the inverse of unserialize. The serialize/unserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg : dict or Message
            The nested message dict as returned by the self.msg method.

        Returns
        -------
        msg_list : list
            The list of bytes objects to be sent with the format::

                [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
                 p_metadata, p_content, buffer1, buffer2, ...]

            In this list, the ``p_*`` entities are the packed or serialized
            versions, so if JSON is used, these are utf8 encoded JSON strings.
        """
        content = msg.get('content', {})
        if content is None:
            content = self.none
        elif isinstance(content, dict):
            content = self.pack(content)
        elif isinstance(content, bytes):
            # content is already packed, as in a relayed message
            pass
        elif isinstance(content, unicode_type):
            # should be bytes, but JSON often spits out unicode
            content = content.encode('utf8')
        else:
            raise TypeError("Content incorrect type: %s" % type(content))

        real_message = [
            self.pack(msg['header']),
            self.pack(msg['parent_header']),
            self.pack(msg['metadata']),
            content,
        ]

        to_send = []

        if isinstance(ident, list):
            # accept list of idents
            to_send.extend(ident)
        elif ident is not None:
            to_send.append(ident)
        to_send.append(DELIM)

        signature = self.sign(real_message)
        to_send.append(signature)

        to_send.extend(real_message)

        return to_send

    def send(self,
             stream,
             msg_or_type,
             content=None,
             parent=None,
             ident=None,
             buffers=None,
             track=False,
             header=None,
             metadata=None):
        """Build and send a message via stream or socket.

        The message format used by this function internally is as follows:

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

        The serialize/unserialize methods convert the nested message dict into this
        format.

        Parameters
        ----------

        stream : zmq.Socket or ZMQStream
            The socket-like object used to send the data.
        msg_or_type : str or Message/dict
            Normally, msg_or_type will be a msg_type unless a message is being
            sent more than once. If a header is supplied, this can be set to
            None and the msg_type will be pulled from the header.

        content : dict or None
            The content of the message (ignored if msg_or_type is a message).
        header : dict or None
            The header dict for the message (ignored if msg_or_type is a message).
        parent : Message or dict or None
            The parent or parent header describing the parent of this message
            (ignored if msg_or_type is a message).
        ident : bytes or list of bytes
            The zmq.IDENTITY routing path.
        metadata : dict or None
            The metadata describing the message
        buffers : list or None
            The already-serialized buffers to be appended to the message.
        track : bool
            Whether to track.  Only for use with Sockets, because ZMQStream
            objects cannot track messages.
            

        Returns
        -------
        msg : dict
            The constructed message.
        """
        if not isinstance(stream, zmq.Socket):
            # ZMQStreams and dummy sockets do not support tracking.
            track = False

        if isinstance(msg_or_type, (Message, dict)):
            # We got a Message or message dict, not a msg_type so don't
            # build a new Message.
            msg = msg_or_type
        else:
            msg = self.msg(msg_or_type,
                           content=content,
                           parent=parent,
                           header=header,
                           metadata=metadata)
        if not os.getpid() == self.pid:
            io.rprint("WARNING: attempted to send message from fork")
            io.rprint(msg)
            return
        buffers = [] if buffers is None else buffers
        if self.adapt_version:
            msg = adapt(msg, self.adapt_version)
        to_send = self.serialize(msg, ident)
        to_send.extend(buffers)
        longest = max([len(s) for s in to_send])
        copy = (longest < self.copy_threshold)

        if buffers and track and not copy:
            # only really track when we are doing zero-copy buffers
            tracker = stream.send_multipart(to_send, copy=False, track=True)
        else:
            # use dummy tracker, which will be done immediately
            tracker = DONE
            stream.send_multipart(to_send, copy=copy)

        if self.debug:
            pprint.pprint(msg)
            pprint.pprint(to_send)
            pprint.pprint(buffers)

        msg['tracker'] = tracker

        return msg

    def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
        """Send a raw message via ident path.

        This method is used to send a already serialized message.

        Parameters
        ----------
        stream : ZMQStream or Socket
            The ZMQ stream or socket to use for sending the message.
        msg_list : list
            The serialized list of messages to send. This only includes the
            [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
            the message.
        ident : ident or list
            A single ident or a list of idents to use in sending.
        """
        to_send = []
        if isinstance(ident, bytes):
            ident = [ident]
        if ident is not None:
            to_send.extend(ident)

        to_send.append(DELIM)
        to_send.append(self.sign(msg_list))
        to_send.extend(msg_list)
        stream.send_multipart(to_send, flags, copy=copy)

    def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
        """Receive and unpack a message.

        Parameters
        ----------
        socket : ZMQStream or Socket
            The socket or stream to use in receiving.

        Returns
        -------
        [idents], msg
            [idents] is a list of idents and msg is a nested message dict of
            same format as self.msg returns.
        """
        if isinstance(socket, ZMQStream):
            socket = socket.socket
        try:
            msg_list = socket.recv_multipart(mode, copy=copy)
        except zmq.ZMQError as e:
            if e.errno == zmq.EAGAIN:
                # We can convert EAGAIN to None as we know in this case
                # recv_multipart won't return None.
                return None, None
            else:
                raise
        # split multipart message into identity list and message dict
        # invalid large messages can cause very expensive string comparisons
        idents, msg_list = self.feed_identities(msg_list, copy)
        try:
            return idents, self.unserialize(msg_list,
                                            content=content,
                                            copy=copy)
        except Exception as e:
            # TODO: handle it
            raise e

    def feed_identities(self, msg_list, copy=True):
        """Split the identities from the rest of the message.

        Feed until DELIM is reached, then return the prefix as idents and
        remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
        but that would be silly.

        Parameters
        ----------
        msg_list : a list of Message or bytes objects
            The message to be split.
        copy : bool
            flag determining whether the arguments are bytes or Messages

        Returns
        -------
        (idents, msg_list) : two lists
            idents will always be a list of bytes, each of which is a ZMQ
            identity. msg_list will be a list of bytes or zmq.Messages of the
            form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
            should be unpackable/unserializable via self.unserialize at this
            point.
        """
        if copy:
            idx = msg_list.index(DELIM)
            return msg_list[:idx], msg_list[idx + 1:]
        else:
            failed = True
            for idx, m in enumerate(msg_list):
                if m.bytes == DELIM:
                    failed = False
                    break
            if failed:
                raise ValueError("DELIM not in msg_list")
            idents, msg_list = msg_list[:idx], msg_list[idx + 1:]
            return [m.bytes for m in idents], msg_list

    def _add_digest(self, signature):
        """add a digest to history to protect against replay attacks"""
        if self.digest_history_size == 0:
            # no history, never add digests
            return

        self.digest_history.add(signature)
        if len(self.digest_history) > self.digest_history_size:
            # threshold reached, cull 10%
            self._cull_digest_history()

    def _cull_digest_history(self):
        """cull the digest history
        
        Removes a randomly selected 10% of the digest history
        """
        current = len(self.digest_history)
        n_to_cull = max(int(current // 10), current - self.digest_history_size)
        if n_to_cull >= current:
            self.digest_history = set()
            return
        to_cull = random.sample(self.digest_history, n_to_cull)
        self.digest_history.difference_update(to_cull)

    def unserialize(self, msg_list, content=True, copy=True):
        """Unserialize a msg_list to a nested message dict.

        This is roughly the inverse of serialize. The serialize/unserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg_list : list of bytes or Message objects
            The list of message parts of the form [HMAC,p_header,p_parent,
            p_metadata,p_content,buffer1,buffer2,...].
        content : bool (True)
            Whether to unpack the content dict (True), or leave it packed
            (False).
        copy : bool (True)
            Whether to return the bytes (True), or the non-copying Message
            object in each place (False).

        Returns
        -------
        msg : dict
            The nested message dict with top-level keys [header, parent_header,
            content, buffers].
        """
        minlen = 5
        message = {}
        if not copy:
            for i in range(minlen):
                msg_list[i] = msg_list[i].bytes
        if self.auth is not None:
            signature = msg_list[0]
            if not signature:
                raise ValueError("Unsigned Message")
            if signature in self.digest_history:
                raise ValueError("Duplicate Signature: %r" % signature)
            self._add_digest(signature)
            check = self.sign(msg_list[1:5])
            if not signature == check:
                raise ValueError("Invalid Signature: %r" % signature)
        if not len(msg_list) >= minlen:
            raise TypeError(
                "malformed message, must have at least %i elements" % minlen)
        header = self.unpack(msg_list[1])
        message['header'] = extract_dates(header)
        message['msg_id'] = header['msg_id']
        message['msg_type'] = header['msg_type']
        message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
        message['metadata'] = self.unpack(msg_list[3])
        if content:
            message['content'] = self.unpack(msg_list[4])
        else:
            message['content'] = msg_list[4]

        message['buffers'] = msg_list[5:]
        # print("received: %s: %s\n    %s" % (message['msg_type'], message['header'], message['content']))
        # adapt to the current version
        return adapt(message)
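
A minimal round-trip sketch of the wire format described by serialize/unserialize. It assumes pyzmq is installed and that Session is importable from IPython.kernel.zmq.session in this era; the key is illustrative.

from IPython.kernel.zmq.session import Session

s = Session(key=b'illustrative-key', packer='json')
msg = s.msg('execute_request', content={'code': 'print(1)'})
wire = s.serialize(msg)            # [DELIM, HMAC, p_header, p_parent, p_metadata, p_content]
idents, parts = s.feed_identities(wire)
roundtrip = s.unserialize(parts)   # verifies the HMAC signature because key is set
assert roundtrip['content'] == msg['content']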
Example no. 4
class BaseDB(LoggingConfigurable):
    """Empty Parent class so traitlets work on DB."""
    # base configurable traits:
    session = Unicode("")
Example no. 5
class HistoryAccessor(Configurable):
    """Access the history database without adding to it.
    
    This is intended for use by standalone history tools. IPython shells use
    HistoryManager, below, which is a subclass of this."""

    # String holding the path to the history file
    hist_file = Unicode(
        config=True,
        help="""Path to file to use for SQLite history database.
        
        By default, IPython will put the history database in the IPython
        profile directory.  If you would rather share one history among
        profiles, you can set this value in each, so that they are consistent.
        
        Due to an issue with fcntl, SQLite is known to misbehave on some NFS
        mounts.  If you see IPython hanging, try setting this to something on a
        local disk, e.g::
        
            ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
        
        """)

    enabled = Bool(True,
                   config=True,
                   help="""enable the SQLite history
        
        set enabled=False to disable the SQLite history,
        in which case there will be no stored history, no SQLite connection,
        and no background saving thread.  This may be necessary in some
        threaded environments where IPython is embedded.
        """)

    connection_options = Dict(
        config=True,
        help="""Options for configuring the SQLite connection
        
        These options are passed as keyword args to sqlite3.connect
        when establishing database connections.
        """)

    # The SQLite database
    db = Any()

    def _db_changed(self, name, old, new):
        """validate the db, since it can be an Instance of two different types"""
        connection_types = (DummyDB, )
        if sqlite3 is not None:
            connection_types = (DummyDB, sqlite3.Connection)
        if not isinstance(new, connection_types):
            msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \
                    (self.__class__.__name__, new)
            raise TraitError(msg)

    def __init__(self, profile='default', hist_file=u'', **traits):
        """Create a new history accessor.
        
        Parameters
        ----------
        profile : str
          The name of the profile from which to open history.
        hist_file : str
          Path to an SQLite history database stored by IPython. If specified,
          hist_file overrides profile.
        config :
          Config object. hist_file can also be set through this.
        """
        # We need a pointer back to the shell for various tasks.
        super(HistoryAccessor, self).__init__(**traits)
        # defer setting hist_file from kwarg until after init,
        # otherwise the default kwarg value would clobber any value
        # set by config
        if hist_file:
            self.hist_file = hist_file

        if self.hist_file == u'':
            # No one has set the hist_file, yet.
            self.hist_file = self._get_hist_file_name(profile)

        if sqlite3 is None and self.enabled:
            warn(
                "IPython History requires SQLite, your history will not be saved"
            )
            self.enabled = False

        self.init_db()

    def _get_hist_file_name(self, profile='default'):
        """Find the history file for the given profile name.
        
        This is overridden by the HistoryManager subclass, to use the shell's
        active profile.
        
        Parameters
        ----------
        profile : str
          The name of a profile which has a history file.
        """
        return os.path.join(locate_profile(profile), 'history.sqlite')

    @catch_corrupt_db
    def init_db(self):
        """Connect to the database, and create tables if necessary."""
        if not self.enabled:
            self.db = DummyDB()
            return

        # use detect_types so that timestamps return datetime objects
        kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES
                      | sqlite3.PARSE_COLNAMES)
        kwargs.update(self.connection_options)
        self.db = sqlite3.connect(self.hist_file, **kwargs)
        self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
                        primary key autoincrement, start timestamp,
                        end timestamp, num_cmds integer, remark text)""")
        self.db.execute("""CREATE TABLE IF NOT EXISTS history
                (session integer, line integer, source text, source_raw text,
                PRIMARY KEY (session, line))""")
        # Output history is optional, but ensure the table's there so it can be
        # enabled later.
        self.db.execute("""CREATE TABLE IF NOT EXISTS output_history
                        (session integer, line integer, output text,
                        PRIMARY KEY (session, line))""")
        self.db.commit()

    def writeout_cache(self):
        """Overridden by HistoryManager to dump the cache before certain
        database lookups."""
        pass

    ## -------------------------------
    ## Methods for retrieving history:
    ## -------------------------------
    def _run_sql(self, sql, params, raw=True, output=False):
        """Prepares and runs an SQL query for the history database.

        Parameters
        ----------
        sql : str
          Any filtering expressions to go after SELECT ... FROM ...
        params : tuple
          Parameters passed to the SQL query (to replace "?")
        raw, output : bool
          See :meth:`get_range`

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        toget = 'source_raw' if raw else 'source'
        sqlfrom = "history"
        if output:
            sqlfrom = "history LEFT JOIN output_history USING (session, line)"
            toget = "history.%s, output_history.output" % toget
        cur = self.db.execute("SELECT session, line, %s FROM %s " %\
                                (toget, sqlfrom) + sql, params)
        if output:  # Regroup into 3-tuples, and parse JSON
            return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur)
        return cur

    @needs_sqlite
    @catch_corrupt_db
    def get_session_info(self, session=0):
        """get info about a session

        Parameters
        ----------

        session : int
            Session number to retrieve. The current session is 0, and negative
            numbers count back from current session, so -1 is previous session.

        Returns
        -------

        (session_id [int], start [datetime], end [datetime], num_cmds [int],
        remark [unicode])

        Sessions that are running or did not exit cleanly will have `end=None`
        and `num_cmds=None`.

        """

        if session <= 0:
            session += self.session_number

        query = "SELECT * from sessions where session == ?"
        return self.db.execute(query, (session, )).fetchone()

    @catch_corrupt_db
    def get_tail(self, n=10, raw=True, output=False, include_latest=False):
        """Get the last n lines from the history database.

        Parameters
        ----------
        n : int
          The number of lines to get
        raw, output : bool
          See :meth:`get_range`
        include_latest : bool
          If False (default), n+1 lines are fetched, and the latest one
          is discarded, so that the command which triggered the lookup is
          not included in its own results.

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        self.writeout_cache()
        if not include_latest:
            n += 1
        cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?", (n, ),
                            raw=raw,
                            output=output)
        if not include_latest:
            return reversed(list(cur)[1:])
        return reversed(list(cur))

    @catch_corrupt_db
    def search(self,
               pattern="*",
               raw=True,
               search_raw=True,
               output=False,
               n=None,
               unique=False):
        """Search the database using unix glob-style matching (wildcards
        * and ?).

        Parameters
        ----------
        pattern : str
          The wildcarded pattern to match when searching
        search_raw : bool
          If True, search the raw input, otherwise, the parsed input
        raw, output : bool
          See :meth:`get_range`
        n : None or int
          If an integer is given, it defines the limit of
          returned entries.
        unique : bool
          When it is true, return only unique entries.

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        tosearch = "source_raw" if search_raw else "source"
        if output:
            tosearch = "history." + tosearch
        self.writeout_cache()
        sqlform = "WHERE %s GLOB ?" % tosearch
        params = (pattern, )
        if unique:
            sqlform += ' GROUP BY {0}'.format(tosearch)
        if n is not None:
            sqlform += " ORDER BY session DESC, line DESC LIMIT ?"
            params += (n, )
        elif unique:
            sqlform += " ORDER BY session, line"
        cur = self._run_sql(sqlform, params, raw=raw, output=output)
        if n is not None:
            return reversed(list(cur))
        return cur

    @catch_corrupt_db
    def get_range(self, session, start=1, stop=None, raw=True, output=False):
        """Retrieve input by session.

        Parameters
        ----------
        session : int
            Session number to retrieve.
        start : int
            First line to retrieve.
        stop : int
            End of line range (excluded from output itself). If None, retrieve
            to the end of the session.
        raw : bool
            If True, return untranslated input
        output : bool
            If True, attempt to include output. This will be 'real' Python
            objects for the current session, or text reprs from previous
            sessions if db_log_output was enabled at the time. Where no output
            is found, None is used.

        Returns
        -------
        An iterator over the desired lines. Each line is a 3-tuple, either
        (session, line, input) if output is False, or
        (session, line, (input, output)) if output is True.
        """
        if stop:
            lineclause = "line >= ? AND line < ?"
            params = (session, start, stop)
        else:
            lineclause = "line>=?"
            params = (session, start)

        return self._run_sql("WHERE session==? AND %s" % lineclause,
                             params,
                             raw=raw,
                             output=output)

    def get_range_by_str(self, rangestr, raw=True, output=False):
        """Get lines of history from a string of ranges, as used by magic
        commands %hist, %save, %macro, etc.

        Parameters
        ----------
        rangestr : str
          A string specifying ranges, e.g. "5 ~2/1-4". See
          :func:`magic_history` for full details.
        raw, output : bool
          As :meth:`get_range`

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        for sess, s, e in extract_hist_ranges(rangestr):
            for line in self.get_range(sess, s, e, raw=raw, output=output):
                yield line
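
A brief usage sketch, assuming HistoryAccessor is importable from IPython.core.history and that the 'default' profile already has a populated history.sqlite:

from IPython.core.history import HistoryAccessor

hist = HistoryAccessor(profile='default')
for session, line, source in hist.get_tail(5, raw=True):
    print("%s/%s: %s" % (session, line, source))
for session, line, source in hist.search('import*', n=10):
    print(source)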
Example no. 6
class BoundedFloatTextWidget(_BoundedFloatWidget):
    _view_name = Unicode('FloatTextView', sync=True)
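
A tiny usage sketch, assuming the pre-4.0 layout in which this class is exported from IPython.html.widgets; inside a notebook cell it would render the FloatTextView declared above.

from IPython.display import display
from IPython.html.widgets import BoundedFloatTextWidget

w = BoundedFloatTextWidget(value=5.0, min=0.0, max=10.0, description='gain')
display(w)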
Example no. 7
class NotebookApp(BaseIPythonApplication):

    name = 'ipython-notebook'
    default_config_file_name='ipython_notebook_config.py'
    
    description = """
        The IPython HTML Notebook.
        
        This launches a Tornado based HTML Notebook Server that serves up an
        HTML5/Javascript Notebook client.
    """
    examples = _examples
    
    classes = IPythonConsoleApp.classes + [MappingKernelManager, NotebookManager]
    flags = Dict(flags)
    aliases = Dict(aliases)

    kernel_argv = List(Unicode)

    log_level = Enum((0,10,20,30,40,50,'DEBUG','INFO','WARN','ERROR','CRITICAL'),
                    default_value=logging.INFO,
                    config=True,
                    help="Set the log level by value or name.")

    # create requested profiles by default, if they don't exist:
    auto_create = Bool(True)

    # file to be opened in the notebook server
    file_to_run = Unicode('')

    # Network related information.

    ip = Unicode(LOCALHOST, config=True,
        help="The IP address the notebook server will listen on."
    )

    def _ip_changed(self, name, old, new):
        if new == u'*': self.ip = u''

    port = Integer(8888, config=True,
        help="The port the notebook server will listen on."
    )
    port_retries = Integer(50, config=True,
        help="The number of additional ports to try if the specified port is not available."
    )

    certfile = Unicode(u'', config=True, 
        help="""The full path to an SSL/TLS certificate file."""
    )
    
    keyfile = Unicode(u'', config=True, 
        help="""The full path to a private key file for usage with SSL/TLS."""
    )

    password = Unicode(u'', config=True,
                      help="""Hashed password to use for web authentication.

                      To generate, type in a python/IPython shell:

                        from IPython.lib import passwd; passwd()

                      The string should be of the form type:salt:hashed-password.
                      """
    )

    open_browser = Bool(True, config=True,
                        help="""Whether to open in a browser after starting.
                        The specific browser used is platform dependent and
                        determined by the python standard library `webbrowser`
                        module, unless it is overridden using the --browser
                        (NotebookApp.browser) configuration option.
                        """)

    browser = Unicode(u'', config=True,
                      help="""Specify what command to use to invoke a web
                      browser when opening the notebook. If not specified, the
                      default browser will be determined by the `webbrowser`
                      standard library module, which allows setting of the
                      BROWSER environment variable to override it.
                      """)
    
    read_only = Bool(False, config=True,
        help="Whether to prevent editing/execution of notebooks."
    )
    
    webapp_settings = Dict(config=True,
            help="Supply overrides for the tornado.web.Application that the "
                 "IPython notebook uses.")
    
    enable_mathjax = Bool(True, config=True,
        help="""Whether to enable MathJax for typesetting math/TeX

        MathJax is the javascript library IPython uses to render math/LaTeX. It is
        very large, so you may want to disable it if you have a slow internet
        connection, or for offline use of the notebook.

        When disabled, equations etc. will appear as their untransformed TeX source.
        """
    )
    def _enable_mathjax_changed(self, name, old, new):
        """set mathjax url to empty if mathjax is disabled"""
        if not new:
            self.mathjax_url = u''

    base_project_url = Unicode('/', config=True,
                               help='''The base URL for the notebook server''')
    base_kernel_url = Unicode('/', config=True,
                               help='''The base URL for the kernel server''')
    websocket_host = Unicode("", config=True,
        help="""The hostname for the websocket server."""
    )

    mathjax_url = Unicode("", config=True,
        help="""The url for MathJax.js."""
    )
    def _mathjax_url_default(self):
        if not self.enable_mathjax:
            return u''
        static_path = self.webapp_settings.get("static_path", os.path.join(os.path.dirname(__file__), "static"))
        static_url_prefix = self.webapp_settings.get("static_url_prefix",
                                                     "/static/")
        if os.path.exists(os.path.join(static_path, 'mathjax', "MathJax.js")):
            self.log.info("Using local MathJax")
            return static_url_prefix+u"mathjax/MathJax.js"
        else:
            if self.certfile:
                # HTTPS: load from Rackspace CDN, because SSL certificate requires it
                base = u"https://c328740.ssl.cf1.rackcdn.com"
            else:
                base = u"http://cdn.mathjax.org"
            
            url = base + u"/mathjax/latest/MathJax.js"
            self.log.info("Using MathJax from CDN: %s", url)
            return url
    
    def _mathjax_url_changed(self, name, old, new):
        if new and not self.enable_mathjax:
            # enable_mathjax=False overrides mathjax_url
            self.mathjax_url = u''
        else:
            self.log.info("Using MathJax: %s", new)

    def parse_command_line(self, argv=None):
        super(NotebookApp, self).parse_command_line(argv)
        if argv is None:
            argv = sys.argv[1:]

        # Scrub frontend-specific flags
        self.kernel_argv = swallow_argv(argv, notebook_aliases, notebook_flags)
        # Kernel should inherit default config file from frontend
        self.kernel_argv.append("--KernelApp.parent_appname='%s'"%self.name)

        if self.extra_args:
            f = os.path.abspath(self.extra_args[0])
            if os.path.isdir(f):
                nbdir = f
            else:
                self.file_to_run = f
                nbdir = os.path.dirname(f)
            self.config.NotebookManager.notebook_dir = nbdir

    def init_configurables(self):
        # force Session default to be secure
        default_secure(self.config)
        self.kernel_manager = MappingKernelManager(
            config=self.config, log=self.log, kernel_argv=self.kernel_argv,
            connection_dir = self.profile_dir.security_dir,
        )
        self.notebook_manager = NotebookManager(config=self.config, log=self.log)
        self.log.info("Serving notebooks from %s", self.notebook_manager.notebook_dir)
        self.notebook_manager.list_notebooks()
        self.cluster_manager = ClusterManager(config=self.config, log=self.log)
        self.cluster_manager.update_profiles()

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger that
        # self.log is a child of. The logging module dispatches log messages to a log
        # and all of its ancestors until propagate is set to False.
        self.log.propagate = False
    
    def init_webapp(self):
        """initialize tornado webapp and httpserver"""
        self.web_app = NotebookWebApplication(
            self, self.kernel_manager, self.notebook_manager, 
            self.cluster_manager, self.log,
            self.base_project_url, self.webapp_settings
        )
        if self.certfile:
            ssl_options = dict(certfile=self.certfile)
            if self.keyfile:
                ssl_options['keyfile'] = self.keyfile
        else:
            ssl_options = None
        self.web_app.password = self.password
        self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options)
        if ssl_options is None and not self.ip and not (self.read_only and not self.password):
            self.log.critical('WARNING: the notebook server is listening on all IP addresses '
                              'but not using any encryption or authentication. This is highly '
                              'insecure and not recommended.')

        success = None
        for port in random_ports(self.port, self.port_retries+1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                if e.errno != errno.EADDRINUSE:
                    raise
                self.log.info('The port %i is already in use, trying another random port.' % port)
            else:
                self.port = port
                success = True
                break
        if not success:
            self.log.critical('ERROR: the notebook server could not be started because '
                              'no available port could be found.')
            self.exit(1)
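
A hedged ipython_notebook_config.py sketch exercising a few of the traits above; the certificate path is purely illustrative.

c = get_config()
c.NotebookApp.ip = '127.0.0.1'
c.NotebookApp.port = 9999
c.NotebookApp.open_browser = False
c.NotebookApp.certfile = u'/path/to/notebook_cert.pem'  # illustrative; setting it enables SSL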
Example no. 8
class SVG2PDFPreprocessor(ConvertFiguresPreprocessor):
    """
    Converts all of the outputs in a notebook from SVG to PDF.
    """
    def _from_format_default(self):
        return 'image/svg+xml'

    def _to_format_default(self):
        return 'application/pdf'

    command = Unicode(config=True,
                      help="""The command to use for converting SVG to PDF
        
        This string is a template, which will be formatted with the keys
        to_filename and from_filename.
        
        The conversion call must read the SVG from {from_filename},
        and write a PDF to {to_filename}.
        """)

    def _command_default(self):
        return self.inkscape + \
               ' --without-gui --export-pdf="{to_filename}" "{from_filename}"'

    inkscape = Unicode(config=True, help="The path to Inkscape, if necessary")

    def _inkscape_default(self):
        if sys.platform == "darwin":
            if os.path.isfile(INKSCAPE_APP):
                return INKSCAPE_APP
        return "inkscape"

    def convert_figure(self, data_format, data):
        """
        Convert a single SVG figure to PDF.  Returns converted data.
        """

        #Work in a temporary directory
        with TemporaryDirectory() as tmpdir:

            #Write fig to temp file
            input_filename = os.path.join(tmpdir, 'figure.svg')
            # SVG data is unicode text
            with io.open(input_filename, 'w', encoding='utf8') as f:
                f.write(cast_unicode_py2(data))

            #Call conversion application
            output_filename = os.path.join(tmpdir, 'figure.pdf')
            shell = self.command.format(from_filename=input_filename,
                                        to_filename=output_filename)
            subprocess.call(
                shell, shell=True)  #Shell=True okay since input is trusted.

            #Read output from drive
            # return value expects a filename
            if os.path.isfile(output_filename):
                with open(output_filename, 'rb') as f:
                    # PDF is a notebook-supported binary data type, so base64-encode it.
                    return base64.encodestring(f.read())
            else:
                raise TypeError("Inkscape svg to pdf conversion failed")
Example no. 9
class IPEngineApp(BaseParallelApplication):

    name = 'ipengine'
    description = _description
    examples = _examples
    classes = List(
        [ZMQInteractiveShell, ProfileDir, Session, EngineFactory, Kernel, MPI])

    startup_script = Unicode(u'',
                             config=True,
                             help='specify a script to be run at startup')
    startup_command = Unicode('',
                              config=True,
                              help='specify a command to be run at startup')

    url_file = Unicode(
        u'',
        config=True,
        help=
        """The full location of the file containing the connection information for
        the controller. If this is not given, the file must be in the
        security directory of the cluster directory.  This location is
        resolved using the `profile` or `profile_dir` options.""",
    )
    wait_for_url_file = Float(
        5,
        config=True,
        help="""The maximum number of seconds to wait for url_file to exist.
        This is useful for batch-systems and shared-filesystems where the
        controller and engine are started at the same time and it
        may take a moment for the controller to write the connector files.""")

    url_file_name = Unicode(u'ipcontroller-engine.json', config=True)

    def _cluster_id_changed(self, name, old, new):
        if new:
            base = 'ipcontroller-%s' % new
        else:
            base = 'ipcontroller'
        self.url_file_name = "%s-engine.json" % base

    log_url = Unicode(
        '',
        config=True,
        help="""The URL for the iploggerapp instance, for forwarding
        logging to a central location.""")

    # an IPKernelApp instance, used to setup listening for shell frontends
    kernel_app = Instance(IPKernelApp)

    aliases = Dict(aliases)
    flags = Dict(flags)

    @property
    def kernel(self):
        """allow access to the Kernel object, so I look like IPKernelApp"""
        return self.engine.kernel

    def find_url_file(self):
        """Set the url file.

        Here we don't try to actually see if it exists or is valid, as that
        is handled by the connection logic.
        """
        config = self.config
        # Find the actual controller key file
        if not self.url_file:
            self.url_file = os.path.join(self.profile_dir.security_dir,
                                         self.url_file_name)

    def load_connector_file(self):
        """load config from a JSON connector file,
        at a *lower* priority than command-line/config files.
        """

        self.log.info("Loading url_file %r", self.url_file)
        config = self.config

        with open(self.url_file) as f:
            d = json.loads(f.read())

        # allow hand-override of location for disambiguation
        # and ssh-server
        try:
            config.EngineFactory.location
        except AttributeError:
            config.EngineFactory.location = d['location']

        try:
            config.EngineFactory.sshserver
        except AttributeError:
            config.EngineFactory.sshserver = d.get('ssh')

        location = config.EngineFactory.location

        proto, ip = d['interface'].split('://')
        ip = disambiguate_ip_address(ip, location)
        d['interface'] = '%s://%s' % (proto, ip)

        # DO NOT allow override of basic URLs, serialization, or key
        # JSON file takes top priority there
        config.Session.key = cast_bytes(d['key'])
        config.Session.signature_scheme = d['signature_scheme']

        config.EngineFactory.url = d['interface'] + ':%i' % d['registration']

        config.Session.packer = d['pack']
        config.Session.unpacker = d['unpack']

        self.log.debug("Config changed:")
        self.log.debug("%r", config)
        self.connection_info = d

    def bind_kernel(self, **kwargs):
        """Promote engine to listening kernel, accessible to frontends."""
        if self.kernel_app is not None:
            return

        self.log.info(
            "Opening ports for direct connections as an IPython kernel")

        kernel = self.kernel

        kwargs.setdefault('config', self.config)
        kwargs.setdefault('log', self.log)
        kwargs.setdefault('profile_dir', self.profile_dir)
        kwargs.setdefault('session', self.engine.session)

        app = self.kernel_app = IPKernelApp(**kwargs)

        # allow IPKernelApp.instance():
        IPKernelApp._instance = app

        app.init_connection_file()
        # relevant contents of init_sockets:

        app.shell_port = app._bind_socket(kernel.shell_streams[0],
                                          app.shell_port)
        app.log.debug("shell ROUTER Channel on port: %i", app.shell_port)

        app.iopub_port = app._bind_socket(kernel.iopub_socket, app.iopub_port)
        app.log.debug("iopub PUB Channel on port: %i", app.iopub_port)

        kernel.stdin_socket = self.engine.context.socket(zmq.ROUTER)
        app.stdin_port = app._bind_socket(kernel.stdin_socket, app.stdin_port)
        app.log.debug("stdin ROUTER Channel on port: %i", app.stdin_port)

        # start the heartbeat, and log connection info:

        app.init_heartbeat()

        app.log_connection_info()
        app.write_connection_file()

    def init_engine(self):
        # This is the working dir by now.
        sys.path.insert(0, '')
        config = self.config
        # print config
        self.find_url_file()

        # was the url manually specified?
        keys = set(self.config.EngineFactory.keys())
        keys = keys.union(set(self.config.RegistrationFactory.keys()))

        if keys.intersection(set(['ip', 'url', 'port'])):
            # Connection info was specified, don't wait for the file
            url_specified = True
            self.wait_for_url_file = 0
        else:
            url_specified = False

        if self.wait_for_url_file and not os.path.exists(self.url_file):
            self.log.warn("url_file %r not found", self.url_file)
            self.log.warn("Waiting up to %.1f seconds for it to arrive.",
                          self.wait_for_url_file)
            tic = time.time()
            while not os.path.exists(self.url_file) and (
                    time.time() - tic < self.wait_for_url_file):
                # wait for url_file to exist, or until time limit
                time.sleep(0.1)

        if os.path.exists(self.url_file):
            self.load_connector_file()
        elif not url_specified:
            self.log.fatal("Fatal: url file never arrived: %s", self.url_file)
            self.exit(1)

        try:
            exec_lines = config.IPKernelApp.exec_lines
        except AttributeError:
            try:
                exec_lines = config.InteractiveShellApp.exec_lines
            except AttributeError:
                exec_lines = config.IPKernelApp.exec_lines = []
        try:
            exec_files = config.IPKernelApp.exec_files
        except AttributeError:
            try:
                exec_files = config.InteractiveShellApp.exec_files
            except AttributeError:
                exec_files = config.IPKernelApp.exec_files = []

        if self.startup_script:
            exec_files.append(self.startup_script)
        if self.startup_command:
            exec_lines.append(self.startup_command)

        # Create the underlying shell class and Engine
        # shell_class = import_item(self.master_config.Global.shell_class)
        # print self.config
        try:
            self.engine = EngineFactory(
                config=config,
                log=self.log,
                connection_info=self.connection_info,
            )
        except:
            self.log.error("Couldn't start the Engine", exc_info=True)
            self.exit(1)

    def forward_logging(self):
        if self.log_url:
            self.log.info("Forwarding logging to %s", self.log_url)
            context = self.engine.context
            lsock = context.socket(zmq.PUB)
            lsock.connect(self.log_url)
            handler = EnginePUBHandler(self.engine, lsock)
            handler.setLevel(self.log_level)
            self.log.addHandler(handler)

    def init_mpi(self):
        global mpi
        self.mpi = MPI(parent=self)

        mpi_import_statement = self.mpi.init_script
        if mpi_import_statement:
            try:
                self.log.info("Initializing MPI:")
                self.log.info(mpi_import_statement)
                exec mpi_import_statement in globals()
            except:
                mpi = None
        else:
            mpi = None

    @catch_config_error
    def initialize(self, argv=None):
        super(IPEngineApp, self).initialize(argv)
        self.init_mpi()
        self.init_engine()
        self.forward_logging()

    def start(self):
        self.engine.start()
        try:
            self.engine.loop.start()
        except KeyboardInterrupt:
            self.log.critical("Engine Interrupted, shutting down...\n")
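For reference, the registration URL that load_connector_file derives from the ipcontroller-engine.json file can be reconstructed in a few lines. This sketch is illustrative only: it skips the disambiguate_ip_address step and assumes the JSON keys used above ('interface', 'registration').

import json

def registration_url(url_file):
    """Illustrative: rebuild the controller registration URL from a
    connector file, mirroring load_connector_file (without the
    location-based IP disambiguation)."""
    with open(url_file) as f:
        d = json.loads(f.read())
    proto, ip = d['interface'].split('://')
    return '%s://%s:%i' % (proto, ip, d['registration'])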
Example No. 10
class Latex(_String):
    """Renders math inside the string `value` as Latex (requires $ $ or $$ $$ 
    and similar latex tags)."""
    _view_name = Unicode('LatexView', sync=True)
Example No. 11
class Textarea(_String):
    """Multiline text area widget."""
    _view_name = Unicode('TextareaView', sync=True)

    def scroll_to_bottom(self):
        self.send({"method": "scroll_to_bottom"})
Example No. 12
class HTML(_String):
    """Renders the string `value` as HTML."""
    _view_name = Unicode('HTMLView', sync=True)
Example No. 13
class ProfileList(Application):
    name = 'ipython-profile'
    description = list_help
    examples = _list_examples

    aliases = Dict({
        'ipython-dir': 'ProfileList.ipython_dir',
        'log-level': 'Application.log_level',
    })
    flags = Dict(
        dict(debug=({
            'Application': {
                'log_level': 0
            }
        }, "Set Application.log_level to 0, maximizing log output.")))

    ipython_dir = Unicode(get_ipython_dir(),
                          config=True,
                          help="""
        The name of the IPython directory. This directory is used for logging
        configuration (through profiles), history storage, etc. The default
        is usually $HOME/.ipython. This options can also be specified through
        the environment variable IPYTHONDIR.
        """)

    def _print_profiles(self, profiles):
        """print list of profiles, indented."""
        for profile in profiles:
            print('    %s' % profile)

    def list_profile_dirs(self):
        profiles = list_bundled_profiles()
        if profiles:
            print()
            print("Available profiles in IPython:")
            self._print_profiles(profiles)
            print()
            print("    The first request for a bundled profile will copy it")
            print("    into your IPython directory (%s)," % self.ipython_dir)
            print("    where you can customize it.")

        profiles = list_profiles_in(self.ipython_dir)
        if profiles:
            print()
            print("Available profiles in %s:" % self.ipython_dir)
            self._print_profiles(profiles)

        profiles = list_profiles_in(os.getcwd())
        if profiles:
            print()
            print("Available profiles in current directory (%s):" %
                  os.getcwd())
            self._print_profiles(profiles)

        print()
        print("To use any of the above profiles, start IPython with:")
        print("    ipython --profile=<name>")
        print()

    def start(self):
        self.list_profile_dirs()
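list_profiles_in is imported from IPython's profile machinery. As a rough mental model (an assumption about the convention, not the library's actual implementation), it amounts to scanning a directory for subdirectories named profile_<name>:

import os

def list_profiles_in_sketch(path):
    """Hypothetical stand-in for list_profiles_in: IPython profiles are
    conventionally stored as 'profile_<name>' directories."""
    if not os.path.isdir(path):
        return []
    return [entry[len('profile_'):]
            for entry in os.listdir(path)
            if entry.startswith('profile_')
            and os.path.isdir(os.path.join(path, entry))]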
Example No. 14
class IPythonConsoleApp(ConnectionFileMixin):
    name = 'ipython-console-mixin'

    description = """
        The IPython Mixin Console.
        
        This class contains the common portions of console clients (QtConsole,
        ZMQ-based terminal console, etc).  It is not a full console, in that
        launched terminal subprocesses will not be able to accept input.
        
        The Console using this mixin supports various extra features beyond
        the single-process Terminal IPython shell, such as connecting to an
        existing kernel, via:
        
            ipython <appname> --existing
        
        as well as tunneling via SSH
        
    """

    classes = classes
    flags = Dict(flags)
    aliases = Dict(aliases)
    kernel_manager_class = KernelManager
    kernel_client_class = BlockingKernelClient

    kernel_argv = List(Unicode)
    # frontend flags&aliases to be stripped when building kernel_argv
    frontend_flags = Any(app_flags)
    frontend_aliases = Any(app_aliases)

    # create requested profiles by default, if they don't exist:
    auto_create = CBool(True)
    # connection info:

    sshserver = Unicode(
        '',
        config=True,
        help="""The SSH server to use to connect to the kernel.""")
    sshkey = Unicode(
        '',
        config=True,
        help="""Path to the ssh key to use for logging in to the ssh server."""
    )

    def _connection_file_default(self):
        return 'kernel-%i.json' % os.getpid()

    existing = CUnicode('',
                        config=True,
                        help="""Connect to an already running kernel""")

    kernel_name = Unicode('python',
                          config=True,
                          help="""The name of the default kernel to start.""")

    confirm_exit = CBool(
        True,
        config=True,
        help="""
        Set to display a confirmation dialog on exit. You can always use 'exit' or 'quit'
        to force a direct exit without any confirmation.""",
    )

    def build_kernel_argv(self, argv=None):
        """build argv to be passed to kernel subprocess"""
        if argv is None:
            argv = sys.argv[1:]
        self.kernel_argv = swallow_argv(argv, self.frontend_aliases,
                                        self.frontend_flags)
        # kernel should inherit default config file from frontend
        self.kernel_argv.append("--IPKernelApp.parent_appname='%s'" %
                                self.name)

    def init_connection_file(self):
        """find the connection file, and load the info if found.
        
        The current working directory and the current profile's security
        directory will be searched for the file if it is not given by
        absolute path.
        
        When attempting to connect to an existing kernel and the `--existing`
        argument does not match an existing file, it will be interpreted as a
        fileglob, and the matching file in the current profile's security dir
        with the latest access time will be used.
        
        After this method is called, self.connection_file contains the *full path*
        to the connection file, never just its name.
        """
        if self.existing:
            try:
                cf = find_connection_file(self.existing)
            except Exception:
                self.log.critical(
                    "Could not find existing kernel connection file %s",
                    self.existing)
                self.exit(1)
            self.log.debug("Connecting to existing kernel: %s" % cf)
            self.connection_file = cf
        else:
            # not existing, check if we are going to write the file
            # and ensure that self.connection_file is a full path, not just the shortname
            try:
                cf = find_connection_file(self.connection_file)
            except Exception:
                # file might not exist
                if self.connection_file == os.path.basename(
                        self.connection_file):
                    # just shortname, put it in security dir
                    cf = os.path.join(self.profile_dir.security_dir,
                                      self.connection_file)
                else:
                    cf = self.connection_file
                self.connection_file = cf
        try:
            self.connection_file = filefind(
                self.connection_file, ['.', self.profile_dir.security_dir])
        except IOError:
            self.log.debug("Connection File not found: %s",
                           self.connection_file)
            return

        # should load_connection_file only be used for existing?
        # as it is now, this allows reusing ports if an existing
        # file is requested
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r",
                           self.connection_file,
                           exc_info=True)
            self.exit(1)

    def init_ssh(self):
        """set up ssh tunnels, if needed."""
        if not self.existing or (not self.sshserver and not self.sshkey):
            return
        self.load_connection_file()

        transport = self.transport
        ip = self.ip

        if transport != 'tcp':
            self.log.error("Can only use ssh tunnels with TCP sockets, not %s",
                           transport)
            sys.exit(-1)

        if self.sshkey and not self.sshserver:
            # specifying just the key implies that we are connecting directly
            self.sshserver = ip
            ip = localhost()

        # build connection dict for tunnels:
        info = dict(ip=ip,
                    shell_port=self.shell_port,
                    iopub_port=self.iopub_port,
                    stdin_port=self.stdin_port,
                    hb_port=self.hb_port)

        self.log.info("Forwarding connections to %s via %s" %
                      (ip, self.sshserver))

        # tunnels return a new set of ports, which will be on localhost:
        self.ip = localhost()
        try:
            newports = tunnel_to_kernel(info, self.sshserver, self.sshkey)
        except:
            # even catch KeyboardInterrupt
            self.log.error("Could not setup tunnels", exc_info=True)
            self.exit(1)

        self.shell_port, self.iopub_port, self.stdin_port, self.hb_port = newports

        cf = self.connection_file
        base, ext = os.path.splitext(cf)
        base = os.path.basename(base)
        self.connection_file = os.path.basename(base) + '-ssh' + ext
        self.log.info("To connect another client via this tunnel, use:")
        self.log.info("--existing %s" % self.connection_file)

    def _new_connection_file(self):
        cf = ''
        while not cf:
            # we don't need a 128b id to distinguish kernels, use more readable
            # 48b node segment (12 hex chars).  Users running more than 32k simultaneous
            # kernels can subclass.
            ident = str(uuid.uuid4()).split('-')[-1]
            cf = os.path.join(self.profile_dir.security_dir,
                              'kernel-%s.json' % ident)
            # only keep if it's actually new.  Protect against unlikely collision
            # in 48b random search space
            cf = cf if not os.path.exists(cf) else ''
        return cf

    def init_kernel_manager(self):
        # Don't let Qt or ZMQ swallow KeyboardInterrupts.
        if self.existing:
            self.kernel_manager = None
            return
        signal.signal(signal.SIGINT, signal.SIG_DFL)

        # Create a KernelManager and start a kernel.
        try:
            self.kernel_manager = self.kernel_manager_class(
                ip=self.ip,
                transport=self.transport,
                shell_port=self.shell_port,
                iopub_port=self.iopub_port,
                stdin_port=self.stdin_port,
                hb_port=self.hb_port,
                connection_file=self.connection_file,
                kernel_name=self.kernel_name,
                parent=self,
                ipython_dir=self.ipython_dir,
            )
        except NoSuchKernel:
            self.log.critical("Could not find kernel %s", self.kernel_name)
            self.exit(1)

        self.kernel_manager.client_factory = self.kernel_client_class
        self.kernel_manager.start_kernel(extra_arguments=self.kernel_argv)
        atexit.register(self.kernel_manager.cleanup_ipc_files)

        if self.sshserver:
            # ssh, write new connection file
            self.kernel_manager.write_connection_file()

        # in case KM defaults / ssh writing changes things:
        km = self.kernel_manager
        self.shell_port = km.shell_port
        self.iopub_port = km.iopub_port
        self.stdin_port = km.stdin_port
        self.hb_port = km.hb_port
        self.connection_file = km.connection_file

        atexit.register(self.kernel_manager.cleanup_connection_file)

    def init_kernel_client(self):
        if self.kernel_manager is not None:
            self.kernel_client = self.kernel_manager.client()
        else:
            self.kernel_client = self.kernel_client_class(
                ip=self.ip,
                transport=self.transport,
                shell_port=self.shell_port,
                iopub_port=self.iopub_port,
                stdin_port=self.stdin_port,
                hb_port=self.hb_port,
                connection_file=self.connection_file,
                parent=self,
            )

        self.kernel_client.start_channels()

    def initialize(self, argv=None):
        """
        Classes which mix this class in should call:
               IPythonConsoleApp.initialize(self,argv)
        """
        self.init_connection_file()
        default_secure(self.config)
        self.init_ssh()
        self.init_kernel_manager()
        self.init_kernel_client()
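A simplified sketch of the fallback logic described in init_connection_file, as a hypothetical helper (the real code delegates to find_connection_file and filefind, and also handles the --existing glob case): a bare filename is assumed to live in the profile's security directory, and the file is then searched for in the current directory and that security directory.

import os

def resolve_connection_file(name, security_dir):
    """Illustrative approximation of init_connection_file's search order."""
    # A bare short name belongs in the security directory
    if name == os.path.basename(name):
        candidate = os.path.join(security_dir, name)
    else:
        candidate = name
    # Search the working directory and the security directory
    for directory in ('.', security_dir):
        path = os.path.join(directory, os.path.basename(candidate))
        if os.path.isfile(path):
            return os.path.abspath(path)
    # Not found: return the candidate so the caller can create it later
    return candidate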
Example No. 15
class _FloatWidget(DOMWidget):
    value = CFloat(0.0, help="Float value", sync=True)
    disabled = Bool(False, help="Enable or disable user changes", sync=True)
    description = Unicode(
        help="Description of the value this widget represents", sync=True)
Example No. 16
class SqlMagic(Magics, Configurable):
    """Runs SQL statement on a database, specified by SQLAlchemy connect string.

    Provides the %%sql magic."""

    autolimit = Int(
        0,
        config=True,
        help="Automatically limit the size of the returned result sets")
    style = Unicode(
        'DEFAULT',
        config=True,
        help=
        "Set the table printing style to any of prettytable's defined styles (currently DEFAULT, MSWORD_FRIENDLY, PLAIN_COLUMNS, RANDOM)"
    )
    short_errors = Bool(
        True,
        config=True,
        help="Don't display the full traceback on SQL Programming Error")
    displaylimit = Int(
        0,
        config=True,
        help=
        "Automatic,ally limit the number of rows displayed (full result set is still stored)"
    )
    autopandas = Bool(
        False,
        config=True,
        help="Return Pandas DataFrames instead of regular result sets")
    feedback = Bool(True,
                    config=True,
                    help="Print number of rows affected by DML")

    def __init__(self, shell):
        Configurable.__init__(self, config=shell.config)
        Magics.__init__(self, shell=shell)

        # Add ourselves to the list of modules configurable via %config
        self.shell.configurables.append(self)

    @needs_local_scope
    @line_magic('sql')
    @cell_magic('sql')
    def execute(self, line, cell='', local_ns={}):
        """Runs SQL statement against a database, specified by SQLAlchemy connect string.

        If no database connection has been established, first word
        should be a SQLAlchemy connection string, or the user@db name
        of an established connection.

        Examples::

          %%sql postgresql://me:mypw@localhost/mydb
          SELECT * FROM mytable

          %%sql me@mydb
          DELETE FROM mytable

          %%sql
          DROP TABLE mytable

        SQLAlchemy connect string syntax examples:

          postgresql://me:mypw@localhost/mydb
          sqlite://
          mysql+pymysql://me:mypw@localhost/mydb

        """
        # save globals and locals so they can be referenced in bind vars
        user_ns = self.shell.user_ns
        user_ns.update(local_ns)

        parsed = sql.parse.parse('%s\n%s' % (line, cell))
        conn = sql.connection.Connection.get(parsed['connection'])
        try:
            result = sql.run.run(conn, parsed['sql'], self, user_ns)
            return result
        except (ProgrammingError, OperationalError) as e:
            # SQLite apparently returns all errors as OperationalError :/
            if self.short_errors:
                print(e)
            else:
                raise
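A magic class like SqlMagic is normally wired into a running shell through IPython's standard extension hook. A minimal sketch, assuming SqlMagic is importable as defined above:

def load_ipython_extension(ip):
    """Standard IPython extension entry point: register the magics."""
    ip.register_magics(SqlMagic(ip))

After the extension module is loaded with %load_ext, the %%sql magic becomes available in the session.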
Example No. 17
class FloatTextWidget(_FloatWidget):
    _view_name = Unicode('FloatTextView', sync=True)
Example No. 18
class ZMQTerminalInteractiveShell(TerminalInteractiveShell):
    """A subclass of TerminalInteractiveShell that uses the 0MQ kernel"""
    _executing = False
    _execution_state = Unicode('')
    kernel_timeout = Float(
        60,
        config=True,
        help="""Timeout for giving up on a kernel (in seconds).
        
        On first connect and restart, the console tests whether the
        kernel is running and responsive by sending kernel_info_requests.
        This sets the timeout in seconds for how long the kernel can take
        before being presumed dead.
        """)

    image_handler = Enum(('PIL', 'stream', 'tempfile', 'callable'),
                         config=True,
                         help="""
        Handler for image type output.  This is useful, for example,
        when connecting to a kernel in which the pylab inline backend is
        activated.  There are four handlers defined.  'PIL': Use
        Python Imaging Library to popup image; 'stream': Use an
        external program to show the image.  Image will be fed into
        the STDIN of the program.  You will need to configure
        `stream_image_handler`; 'tempfile': Use an external program to
        show the image.  The image will be saved in a temporary file and
        the program is called with that temporary file.  You will need
        to configure `tempfile_image_handler`; 'callable': You can set
        any Python callable which is called with the image data.  You
        will need to configure `callable_image_handler`.
        """)

    stream_image_handler = List(config=True,
                                help="""
        Command to invoke an image viewer program when you are using
        'stream' image handler.  This option is a list of strings where
        the first element is the command itself and the remaining elements
        are the options for the command.  Raw image data is given as STDIN to
        the program.
        """)

    tempfile_image_handler = List(config=True,
                                  help="""
        Command to invoke an image viewer program when you are using
        'tempfile' image handler.  This option is a list of strings
        where the first element is the command itself and the remaining
        elements are the options for the command.  You can use {file} and
        {format} in the string to represent the location of the
        generated image file and image format.
        """)

    callable_image_handler = Any(config=True,
                                 help="""
        Callable object called via 'callable' image handler with one
        argument, `data`, which is `msg["content"]["data"]` where
        `msg` is the message from the iopub channel.  For example, you can
        find base64 encoded PNG data as `data['image/png']`.
        """)

    mime_preference = List(
        default_value=['image/png', 'image/jpeg', 'image/svg+xml'],
        config=True,
        allow_none=False,
        help="""
        Preferred object representation MIME type in order.  First
        matched MIME type will be used.
        """)

    manager = Instance('IPython.kernel.KernelManager')
    client = Instance('IPython.kernel.KernelClient')

    def _client_changed(self, name, old, new):
        self.session_id = new.session.session

    session_id = Unicode()

    def init_completer(self):
        """Initialize the completion machinery.

        This creates completion machinery that can be used by client code,
        either interactively in-process (typically triggered by the readline
        library), programmatically (such as in test suites) or out-of-process
        (typically over the network by remote frontends).
        """
        from IPython.core.completerlib import (module_completer,
                                               magic_run_completer,
                                               cd_completer)

        self.Completer = ZMQCompleter(self, self.client, config=self.config)

        self.set_hook('complete_command', module_completer, str_key='import')
        self.set_hook('complete_command', module_completer, str_key='from')
        self.set_hook('complete_command', magic_run_completer, str_key='%run')
        self.set_hook('complete_command', cd_completer, str_key='%cd')

        # Only configure readline if we truly are using readline.  IPython can
        # do tab-completion over the network, in GUIs, etc, where readline
        # itself may be absent
        if self.has_readline:
            self.set_readline_completer()

    def run_cell(self, cell, store_history=True):
        """Run a complete IPython cell.
        
        Parameters
        ----------
        cell : str
          The code (including IPython code such as %magic functions) to run.
        store_history : bool
          If True, the raw and translated cell will be stored in IPython's
          history. For user code calling back into IPython's machinery, this
          should be set to False.
        """
        if (not cell) or cell.isspace():
            return

        if cell.strip() == 'exit':
            # explicitly handle 'exit' command
            return self.ask_exit()

        # flush stale replies, which could have been ignored due to missed heartbeats
        while self.client.shell_channel.msg_ready():
            self.client.shell_channel.get_msg()
        # shell_channel.execute takes 'hidden', which is the inverse of store_hist
        msg_id = self.client.shell_channel.execute(cell, not store_history)

        # first thing is wait for any side effects (output, stdin, etc.)
        self._executing = True
        self._execution_state = "busy"
        while self._execution_state != 'idle' and self.client.is_alive():
            try:
                self.handle_stdin_request(msg_id, timeout=0.05)
            except Empty:
                # display intermediate print statements, etc.
                self.handle_iopub(msg_id)
                pass

        # after all of that is done, wait for the execute reply
        while self.client.is_alive():
            try:
                self.handle_execute_reply(msg_id, timeout=0.05)
            except Empty:
                pass
            else:
                break
        self._executing = False

    #-----------------
    # message handlers
    #-----------------

    def handle_execute_reply(self, msg_id, timeout=None):
        msg = self.client.shell_channel.get_msg(block=False, timeout=timeout)
        if msg["parent_header"].get("msg_id", None) == msg_id:

            self.handle_iopub(msg_id)

            content = msg["content"]
            status = content['status']

            if status == 'aborted':
                self.write('Aborted\n')
                return
            elif status == 'ok':
                # print execution payloads as well:
                for item in content["payload"]:
                    text = item.get('text', None)
                    if text:
                        page.page(text)

            elif status == 'error':
                for frame in content["traceback"]:
                    print(frame, file=io.stderr)

            self.execution_count = int(content["execution_count"] + 1)

    def handle_iopub(self, msg_id):
        """ Method to process subscribe channel's messages

           This method consumes and processes messages on the IOPub channel,
           such as stdout, stderr, pyout and status.
           
           It only displays output that is caused by the given msg_id
        """
        while self.client.iopub_channel.msg_ready():
            sub_msg = self.client.iopub_channel.get_msg()
            msg_type = sub_msg['header']['msg_type']
            parent = sub_msg["parent_header"]
            if (not parent) or msg_id == parent['msg_id']:
                if msg_type == 'status':
                    state = self._execution_state = sub_msg["content"][
                        "execution_state"]
                    # idle messages mean an individual sequence is complete,
                    # so break out of consumption to allow other things to take over.
                    if state == 'idle':
                        break

                elif msg_type == 'stream':
                    if sub_msg["content"]["name"] == "stdout":
                        print(sub_msg["content"]["data"],
                              file=io.stdout,
                              end="")
                        io.stdout.flush()
                    elif sub_msg["content"]["name"] == "stderr":
                        print(sub_msg["content"]["data"],
                              file=io.stderr,
                              end="")
                        io.stderr.flush()

                elif msg_type == 'pyout':
                    self.execution_count = int(
                        sub_msg["content"]["execution_count"])
                    format_dict = sub_msg["content"]["data"]
                    self.handle_rich_data(format_dict)
                    # taken from DisplayHook.__call__:
                    hook = self.displayhook
                    hook.start_displayhook()
                    hook.write_output_prompt()
                    hook.write_format_data(format_dict)
                    hook.log_output(format_dict)
                    hook.finish_displayhook()

                elif msg_type == 'display_data':
                    self.handle_rich_data(sub_msg["content"]["data"])

    _imagemime = {
        'image/png': 'png',
        'image/jpeg': 'jpeg',
        'image/svg+xml': 'svg',
    }

    def handle_rich_data(self, data):
        for mime in self.mime_preference:
            if mime in data and mime in self._imagemime:
                self.handle_image(data, mime)
                return

    def handle_image(self, data, mime):
        handler = getattr(self, 'handle_image_{0}'.format(self.image_handler),
                          None)
        if handler:
            handler(data, mime)

    def handle_image_PIL(self, data, mime):
        if mime not in ('image/png', 'image/jpeg'):
            return
        import PIL.Image
        raw = base64.decodestring(data[mime].encode('ascii'))
        img = PIL.Image.open(BytesIO(raw))
        img.show()

    def handle_image_stream(self, data, mime):
        raw = base64.decodestring(data[mime].encode('ascii'))
        imageformat = self._imagemime[mime]
        fmt = dict(format=imageformat)
        args = [s.format(**fmt) for s in self.stream_image_handler]
        with open(os.devnull, 'w') as devnull:
            proc = subprocess.Popen(args,
                                    stdin=subprocess.PIPE,
                                    stdout=devnull,
                                    stderr=devnull)
            proc.communicate(raw)

    def handle_image_tempfile(self, data, mime):
        raw = base64.decodestring(data[mime].encode('ascii'))
        imageformat = self._imagemime[mime]
        filename = 'tmp.{0}'.format(imageformat)
        with NamedFileInTemporaryDirectory(filename) as f, \
                    open(os.devnull, 'w') as devnull:
            f.write(raw)
            f.flush()
            fmt = dict(file=f.name, format=imageformat)
            args = [s.format(**fmt) for s in self.tempfile_image_handler]
            subprocess.call(args, stdout=devnull, stderr=devnull)

    def handle_image_callable(self, data, mime):
        self.callable_image_handler(data)

    def handle_stdin_request(self, msg_id, timeout=0.1):
        """ Method to capture raw_input
        """
        msg_rep = self.client.stdin_channel.get_msg(timeout=timeout)
        # in case any iopub came while we were waiting:
        self.handle_iopub(msg_id)
        if msg_id == msg_rep["parent_header"].get("msg_id"):
            # wrap SIGINT handler
            real_handler = signal.getsignal(signal.SIGINT)

            def double_int(sig, frame):
                # call real handler (forwards sigint to kernel),
                # then raise local interrupt, stopping local raw_input
                real_handler(sig, frame)
                raise KeyboardInterrupt

            signal.signal(signal.SIGINT, double_int)

            try:
                raw_data = raw_input(msg_rep["content"]["prompt"])
            except EOFError:
                # turn EOFError into EOF character
                raw_data = '\x04'
            except KeyboardInterrupt:
                sys.stdout.write('\n')
                return
            finally:
                # restore SIGINT handler
                signal.signal(signal.SIGINT, real_handler)

            # only send stdin reply if there *was not* another request
            # or execution finished while we were reading.
            if not (self.client.stdin_channel.msg_ready()
                    or self.client.shell_channel.msg_ready()):
                self.client.stdin_channel.input(raw_data)

    def mainloop(self, display_banner=False):
        while True:
            try:
                self.interact(display_banner=display_banner)
                #self.interact_with_readline()
                # XXX for testing of a readline-decoupled repl loop, call
                # interact_with_readline above
                break
            except KeyboardInterrupt:
                # this should not be necessary, but KeyboardInterrupt
                # handling seems rather unpredictable...
                self.write("\nKeyboardInterrupt in interact()\n")

    def wait_for_kernel(self, timeout=None):
        """method to wait for a kernel to be ready"""
        tic = time.time()
        self.client.hb_channel.unpause()
        while True:
            msg_id = self.client.kernel_info()
            reply = None
            while True:
                try:
                    reply = self.client.get_shell_msg(timeout=1)
                except Empty:
                    break
                else:
                    if reply['parent_header'].get('msg_id') == msg_id:
                        return True
            if timeout is not None \
                and (time.time() - tic) > timeout \
                and not self.client.hb_channel.is_beating():
                # heart failed
                return False
        return True

    def interact(self, display_banner=None):
        """Closely emulate the interactive Python console."""

        # batch run -> do not interact
        if self.exit_now:
            return

        if display_banner is None:
            display_banner = self.display_banner

        if isinstance(display_banner, basestring):
            self.show_banner(display_banner)
        elif display_banner:
            self.show_banner()

        more = False

        # run a non-empty no-op, so that we don't get a prompt until
        # we know the kernel is ready. This keeps the connection
        # message above the first prompt.
        if not self.wait_for_kernel(self.kernel_timeout):
            error("Kernel did not respond\n")
            return

        if self.has_readline:
            self.readline_startup_hook(self.pre_readline)
            hlen_b4_cell = self.readline.get_current_history_length()
        else:
            hlen_b4_cell = 0
        # exit_now is set by a call to %Exit or %Quit, through the
        # ask_exit callback.

        while not self.exit_now:
            if not self.client.is_alive():
                # kernel died, prompt for action or exit

                action = "restart" if self.manager else "wait for restart"
                ans = self.ask_yes_no("kernel died, %s ([y]/n)?" % action,
                                      default='y')
                if ans:
                    if self.manager:
                        self.manager.restart_kernel(True)
                    self.wait_for_kernel(self.kernel_timeout)
                else:
                    self.exit_now = True
                continue
            try:
                # protect prompt block from KeyboardInterrupt
                # when sitting on ctrl-C
                self.hooks.pre_prompt_hook()
                if more:
                    try:
                        prompt = self.prompt_manager.render('in2')
                    except Exception:
                        self.showtraceback()
                    if self.autoindent:
                        self.rl_do_indent = True

                else:
                    try:
                        prompt = self.separate_in + self.prompt_manager.render(
                            'in')
                    except Exception:
                        self.showtraceback()

                line = self.raw_input(prompt)
                if self.exit_now:
                    # quick exit on sys.std[in|out] close
                    break
                if self.autoindent:
                    self.rl_do_indent = False

            except KeyboardInterrupt:
                #double-guard against keyboardinterrupts during kbdint handling
                try:
                    self.write('\nKeyboardInterrupt\n')
                    source_raw = self.input_splitter.source_raw_reset()[1]
                    hlen_b4_cell = self._replace_rlhist_multiline(
                        source_raw, hlen_b4_cell)
                    more = False
                except KeyboardInterrupt:
                    pass
            except EOFError:
                if self.autoindent:
                    self.rl_do_indent = False
                    if self.has_readline:
                        self.readline_startup_hook(None)
                self.write('\n')
                self.exit()
            except bdb.BdbQuit:
                warn(
                    'The Python debugger has exited with a BdbQuit exception.\n'
                    'Because of how pdb handles the stack, it is impossible\n'
                    'for IPython to properly format this particular exception.\n'
                    'IPython will resume normal operation.')
            except:
                # exceptions here are VERY RARE, but they can be triggered
                # asynchronously by signal handlers, for example.
                self.showtraceback()
            else:
                self.input_splitter.push(line)
                more = self.input_splitter.push_accepts_more()
                if (self.SyntaxTB.last_syntax_error and self.autoedit_syntax):
                    self.edit_syntax_error()
                if not more:
                    source_raw = self.input_splitter.source_raw_reset()[1]
                    hlen_b4_cell = self._replace_rlhist_multiline(
                        source_raw, hlen_b4_cell)
                    self.run_cell(source_raw)

        # Turn off the exit flag, so the mainloop can be restarted if desired
        self.exit_now = False
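The core of run_cell and handle_execute_reply above boils down to: send an execute_request, drain IOPub until the kernel reports 'idle' for that request, then pick up the matching execute_reply from the shell channel. A condensed sketch against a connected blocking client such as self.client (the helper name and the one-second timeouts are illustrative, and kernel-death checks are omitted):

try:
    from queue import Empty          # Python 3
except ImportError:
    from Queue import Empty          # Python 2

def execute_and_wait(client, code):
    """Illustrative condensed version of the message flow in run_cell."""
    msg_id = client.shell_channel.execute(code)
    state = 'busy'
    # Drain IOPub until this request's status goes idle
    while state != 'idle':
        try:
            sub_msg = client.iopub_channel.get_msg(timeout=1)
        except Empty:
            continue
        if sub_msg['parent_header'].get('msg_id') != msg_id:
            continue
        if sub_msg['header']['msg_type'] == 'status':
            state = sub_msg['content']['execution_state']
    # Then collect the matching execute_reply
    while True:
        try:
            reply = client.shell_channel.get_msg(timeout=1)
        except Empty:
            continue
        if reply['parent_header'].get('msg_id') == msg_id:
            return reply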
Example No. 19
class FloatProgressWidget(_BoundedFloatWidget):
    _view_name = Unicode('ProgressView', sync=True)
Example No. 20
class HistoryManager(Configurable):
    """A class to organize all history-related functionality in one place.
    """
    # Public interface

    # An instance of the IPython shell we are attached to
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
    # Lists to hold processed and raw history. These start with a blank entry
    # so that we can index them starting from 1
    input_hist_parsed = List([""])
    input_hist_raw = List([""])
    # A list of directories visited during session
    dir_hist = List()

    def _dir_hist_default(self):
        try:
            return [os.getcwd()]
        except OSError:
            return []

    # A dict of output history, keyed with ints from the shell's
    # execution count. If there are several outputs from one command,
    # only the last one is stored.
    output_hist = Dict()
    # Contains all outputs, in lists of reprs.
    output_hist_reprs = Instance(defaultdict, args=(list, ))

    # String holding the path to the history file
    hist_file = Unicode(config=True)

    # The SQLite database
    db = Instance(sqlite3.Connection)
    # The number of the current session in the history database
    session_number = Int()
    # Should we log output to the database? (default no)
    db_log_output = Bool(False, config=True)
    # Write to database every x commands (higher values save disk access & power)
    #  Values of 1 or less effectively disable caching.
    db_cache_size = Int(0, config=True)
    # The input and output caches
    db_input_cache = List()
    db_output_cache = List()

    # Private interface
    # Variables used to store the three last inputs from the user.  On each new
    # history update, we populate the user's namespace with these, shifted as
    # necessary.
    _i00 = Unicode(u'')
    _i = Unicode(u'')
    _ii = Unicode(u'')
    _iii = Unicode(u'')

    # A set with all forms of the exit command, so that we don't store them in
    # the history (it's annoying to rewind the first entry and land on an exit
    # call).
    _exit_commands = Instance(set,
                              args=([
                                  'Quit', 'quit', 'Exit', 'exit', '%Quit',
                                  '%quit', '%Exit', '%exit'
                              ], ))

    def __init__(self, shell, config=None, **traits):
        """Create a new history manager associated with a shell instance.
        """
        # We need a pointer back to the shell for various tasks.
        super(HistoryManager, self).__init__(shell=shell,
                                             config=config,
                                             **traits)

        if self.hist_file == u'':
            # No one has set the hist_file, yet.
            if shell.profile:
                histfname = 'history-%s' % shell.profile
            else:
                histfname = 'history'
            self.hist_file = os.path.join(shell.ipython_dir,
                                          histfname + '.sqlite')

        try:
            self.init_db()
        except sqlite3.DatabaseError:
            if os.path.isfile(self.hist_file):
                # Try to move the file out of the way.
                newpath = os.path.join(self.shell.ipython_dir,
                                       "hist-corrupt.sqlite")
                os.rename(self.hist_file, newpath)
                print("ERROR! History file wasn't a valid SQLite database.",
                      "It was moved to %s" % newpath,
                      "and a new file created.")
                self.init_db()
            else:
                # The hist_file is probably :memory: or something else.
                raise

        self.new_session()

    def init_db(self):
        """Connect to the database, and create tables if necessary."""
        self.db = sqlite3.connect(self.hist_file)
        self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
                        primary key autoincrement, start timestamp,
                        end timestamp, num_cmds integer, remark text)""")
        self.db.execute("""CREATE TABLE IF NOT EXISTS history 
                (session integer, line integer, source text, source_raw text,
                PRIMARY KEY (session, line))""")
        # Output history is optional, but ensure the table's there so it can be
        # enabled later.
        self.db.execute("""CREATE TABLE IF NOT EXISTS output_history
                        (session integer, line integer, output text,
                        PRIMARY KEY (session, line))""")
        self.db.commit()

    def new_session(self):
        """Get a new session number."""
        with self.db:
            cur = self.db.execute(
                """INSERT INTO sessions VALUES (NULL, ?, NULL,
                            NULL, "") """, (datetime.datetime.now(), ))
            self.session_number = cur.lastrowid

    def end_session(self):
        """Close the database session, filling in the end time and line count."""
        self.writeout_cache()
        with self.db:
            self.db.execute(
                """UPDATE sessions SET end=?, num_cmds=? WHERE
                            session==?""",
                (datetime.datetime.now(), len(self.input_hist_parsed) - 1,
                 self.session_number))
        self.session_number = 0

    def name_session(self, name):
        """Give the current session a name in the history database."""
        with self.db:
            self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
                            (name, self.session_number))

    def reset(self, new_session=True):
        """Clear the session history, releasing all object references, and
        optionally open a new session."""
        if self.session_number:
            self.end_session()
        self.input_hist_parsed[:] = [""]
        self.input_hist_raw[:] = [""]
        self.output_hist.clear()
        # The directory history can't be completely empty
        self.dir_hist[:] = [os.getcwd()]

        if new_session:
            self.new_session()

    ## -------------------------------
    ## Methods for retrieving history:
    ## -------------------------------
    def _run_sql(self, sql, params, raw=True, output=False):
        """Prepares and runs an SQL query for the history database.
        
        Parameters
        ----------
        sql : str
          Any filtering expressions to go after SELECT ... FROM ...
        params : tuple
          Parameters passed to the SQL query (to replace "?")
        raw, output : bool
          See :meth:`get_range`
        
        Returns
        -------
        Tuples as :meth:`get_range`
        """
        toget = 'source_raw' if raw else 'source'
        sqlfrom = "history"
        if output:
            sqlfrom = "history LEFT JOIN output_history USING (session, line)"
            toget = "history.%s, output_history.output" % toget
        cur = self.db.execute("SELECT session, line, %s FROM %s " %\
                                (toget, sqlfrom) + sql, params)
        if output:  # Regroup into 3-tuples, and parse JSON
            loads = lambda out: json.loads(out) if out else None
            return ((ses, lin, (inp, loads(out))) \
                                        for ses, lin, inp, out in cur)
        return cur

    def get_tail(self, n=10, raw=True, output=False, include_latest=False):
        """Get the last n lines from the history database.
        
        Parameters
        ----------
        n : int
          The number of lines to get
        raw, output : bool
          See :meth:`get_range`
        include_latest : bool
          If False (default), n+1 lines are fetched, and the latest one
          is discarded. This is intended for the case where the function is
          called by a user command, so that the command itself is not returned.
        
        Returns
        -------
        Tuples as :meth:`get_range`
        """
        self.writeout_cache()
        if not include_latest:
            n += 1
        cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?", (n, ),
                            raw=raw,
                            output=output)
        if not include_latest:
            return reversed(list(cur)[1:])
        return reversed(list(cur))

    def search(self, pattern="*", raw=True, search_raw=True, output=False):
        """Search the database using unix glob-style matching (wildcards
        * and ?).
        
        Parameters
        ----------
        pattern : str
          The wildcarded pattern to match when searching
        search_raw : bool
          If True, search the raw input, otherwise, the parsed input
        raw, output : bool
          See :meth:`get_range`
        
        Returns
        -------
        Tuples as :meth:`get_range`
        """
        tosearch = "source_raw" if search_raw else "source"
        if output:
            tosearch = "history." + tosearch
        self.writeout_cache()
        return self._run_sql("WHERE %s GLOB ?" % tosearch, (pattern, ),
                             raw=raw,
                             output=output)

    def _get_range_session(self, start=1, stop=None, raw=True, output=False):
        """Get input and output history from the current session. Called by
        get_range, and takes similar parameters."""
        input_hist = self.input_hist_raw if raw else self.input_hist_parsed

        n = len(input_hist)
        if start < 0:
            start += n
        if not stop:
            stop = n
        elif stop < 0:
            stop += n

        for i in range(start, stop):
            if output:
                line = (input_hist[i], self.output_hist_reprs.get(i))
            else:
                line = input_hist[i]
            yield (0, i, line)

    def get_range(self, session=0, start=1, stop=None, raw=True, output=False):
        """Retrieve input by session.
        
        Parameters
        ----------
        session : int
            Session number to retrieve. The current session is 0, and negative
            numbers count back from current session, so -1 is previous session.
        start : int
            First line to retrieve.
        stop : int
            End of line range (excluded from output itself). If None, retrieve
            to the end of the session.
        raw : bool
            If True, return untranslated input
        output : bool
            If True, attempt to include output. This will be 'real' Python
            objects for the current session, or text reprs from previous
            sessions if db_log_output was enabled at the time. Where no output
            is found, None is used.
            
        Returns
        -------
        An iterator over the desired lines. Each line is a 3-tuple, either
        (session, line, input) if output is False, or
        (session, line, (input, output)) if output is True.
        """
        if session == 0 or session == self.session_number:  # Current session
            return self._get_range_session(start, stop, raw, output)
        if session < 0:
            session += self.session_number

        if stop:
            lineclause = "line >= ? AND line < ?"
            params = (session, start, stop)
        else:
            lineclause = "line>=?"
            params = (session, start)

        return self._run_sql("WHERE session==? AND %s"
                             "" % lineclause,
                             params,
                             raw=raw,
                             output=output)

    def get_range_by_str(self, rangestr, raw=True, output=False):
        """Get lines of history from a string of ranges, as used by magic
        commands %hist, %save, %macro, etc.
        
        Parameters
        ----------
        rangestr : str
          A string specifying ranges, e.g. "5 ~2/1-4". See
          :func:`magic_history` for full details.
        raw, output : bool
          As :meth:`get_range`
          
        Returns
        -------
        Tuples as :meth:`get_range`
        """
        for sess, s, e in extract_hist_ranges(rangestr):
            for line in self.get_range(sess, s, e, raw=raw, output=output):
                yield line

    ## ----------------------------
    ## Methods for storing history:
    ## ----------------------------
    def store_inputs(self, line_num, source, source_raw=None):
        """Store source and raw input in history and create input cache
        variables _i*.
        
        Parameters
        ----------
        line_num : int
          The prompt number of this input.
        
        source : str
          Python input.

        source_raw : str, optional
          If given, this is the raw input without any IPython transformations
          applied to it.  If not given, ``source`` is used.
        """
        if source_raw is None:
            source_raw = source
        source = source.rstrip('\n')
        source_raw = source_raw.rstrip('\n')

        # do not store exit/quit commands
        if source_raw.strip() in self._exit_commands:
            return

        self.input_hist_parsed.append(source)
        self.input_hist_raw.append(source_raw)

        self.db_input_cache.append((line_num, source, source_raw))
        # Trigger to flush cache and write to DB.
        if len(self.db_input_cache) >= self.db_cache_size:
            self.writeout_cache()

        # update the auto _i variables
        self._iii = self._ii
        self._ii = self._i
        self._i = self._i00
        self._i00 = source_raw

        # hackish access to user namespace to create _i1,_i2... dynamically
        new_i = '_i%s' % line_num
        to_main = {
            '_i': self._i,
            '_ii': self._ii,
            '_iii': self._iii,
            new_i: self._i00
        }
        self.shell.user_ns.update(to_main)

    def store_output(self, line_num):
        """If database output logging is enabled, this saves all the
        outputs from the indicated prompt number to the database. It's
        called by run_cell after code has been executed.
        
        Parameters
        ----------
        line_num : int
          The line number from which to save outputs
        """
        if (not self.db_log_output) or not self.output_hist_reprs[line_num]:
            return
        output = json.dumps(self.output_hist_reprs[line_num])

        self.db_output_cache.append((line_num, output))
        if self.db_cache_size <= 1:
            self.writeout_cache()

    def _writeout_input_cache(self):
        for line in self.db_input_cache:
            with self.db:
                self.db.execute("INSERT INTO history VALUES (?, ?, ?, ?)",
                                (self.session_number, ) + line)

    def _writeout_output_cache(self):
        for line in self.db_output_cache:
            with self.db:
                self.db.execute("INSERT INTO output_history VALUES (?, ?, ?)",
                                (self.session_number, ) + line)

    def writeout_cache(self):
        """Write any entries in the cache to the database."""
        try:
            self._writeout_input_cache()
        except sqlite3.IntegrityError:
            self.new_session()
            print("ERROR! Session/line number was not unique in",
                  "database. History logging moved to new session",
                  self.session_number)
            try:  # Try writing to the new session. If this fails, don't recurse
                self.writeout_cache()
            except sqlite3.IntegrityError:
                pass
        finally:
            self.db_input_cache = []

        try:
            self._writeout_output_cache()
        except sqlite3.IntegrityError:
            print("!! Session/line number for output was not unique",
                  "in database. Output will not be stored.")
        finally:
            self.db_output_cache = []
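
The write-caching pattern above (buffer rows in a list, flush them to sqlite in one transaction once the buffer reaches db_cache_size, and always clear the buffer afterwards) can be shown with a minimal, self-contained sketch. The class, table, and variable names below are made up for illustration and only mirror the idea of db_input_cache / writeout_cache:

import sqlite3

class InputCache:
    """Toy version of the history write cache: rows are buffered in memory
    and written to the database in a single transaction once the buffer
    reaches the configured size."""

    def __init__(self, cache_size=3):
        self.db = sqlite3.connect(':memory:')
        self.db.execute(
            "CREATE TABLE history (session INTEGER, line INTEGER, source TEXT)")
        self.cache_size = cache_size
        self.session = 1
        self.cache = []

    def store(self, line_num, source):
        self.cache.append((self.session, line_num, source))
        # Trigger to flush the cache and write to the DB.
        if len(self.cache) >= self.cache_size:
            self.writeout()

    def writeout(self):
        try:
            with self.db:  # one transaction per flush
                self.db.executemany(
                    "INSERT INTO history VALUES (?, ?, ?)", self.cache)
        finally:
            self.cache = []  # the buffer is cleared even if the write failed

c = InputCache()
for n, src in enumerate(["a = 1", "a + 1", "print(a)"], start=1):
    c.store(n, src)
print(c.db.execute("SELECT COUNT(*) FROM history").fetchone()[0])  # 3
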
Example no. 21
class KernelManager(ConnectionFileMixin):
    """Manages a single kernel in a subprocess on this host.

    This version starts kernels with Popen.
    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)

    def _context_default(self):
        return zmq.Context.instance()

    # the class to create with our `client` method
    client_class = DottedObjectName(
        'IPython.kernel.blocking.BlockingKernelClient')
    client_factory = Type()

    def _client_class_changed(self, name, old, new):
        self.client_factory = import_item(str(new))

    # The kernel process with which the KernelManager is communicating.
    # generally a Popen instance
    kernel = Any()

    kernel_spec_manager = Instance(kernelspec.KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return kernelspec.KernelSpecManager(ipython_dir=self.ipython_dir)

    kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)

    kernel_spec = Instance(kernelspec.KernelSpec)

    def _kernel_spec_default(self):
        return self.kernel_spec_manager.get_kernel_spec(self.kernel_name)

    def _kernel_name_changed(self, name, old, new):
        if new == 'python':
            self.kernel_name = kernelspec.NATIVE_KERNEL_NAME
            # This triggered another run of this function, so we can exit now
            return
        self.kernel_spec = self.kernel_spec_manager.get_kernel_spec(new)
        self.ipython_kernel = new in {'python', 'python2', 'python3'}

    kernel_cmd = List(Unicode,
                      config=True,
                      help="""DEPRECATED: Use kernel_name instead.
        
        The Popen Command to launch the kernel.
        Override this if you have a custom kernel.
        If kernel_cmd is specified in a configuration file,
        IPython does not pass any arguments to the kernel,
        because it cannot make any assumptions about the 
        arguments that the kernel understands. In particular,
        this means that the kernel does not receive the
        option --debug if it is given on the IPython command line.
        """)

    def _kernel_cmd_changed(self, name, old, new):
        warnings.warn("Setting kernel_cmd is deprecated, use kernel_spec to "
                      "start different kernels.")
        self.ipython_kernel = False

    ipython_kernel = Bool(True)

    ipython_dir = Unicode()

    def _ipython_dir_default(self):
        return get_ipython_dir()

    # Protected traits
    _launch_args = Any()
    _control_socket = Any()

    _restarter = Any()

    autorestart = Bool(False,
                       config=True,
                       help="""Should we autorestart the kernel if it dies.""")

    def __del__(self):
        self._close_control_socket()
        self.cleanup_connection_file()

    #--------------------------------------------------------------------------
    # Kernel restarter
    #--------------------------------------------------------------------------

    def start_restarter(self):
        pass

    def stop_restarter(self):
        pass

    def add_restart_callback(self, callback, event='restart'):
        """register a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.add_callback(callback, event)

    def remove_restart_callback(self, callback, event='restart'):
        """unregister a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.remove_callback(callback, event)

    #--------------------------------------------------------------------------
    # create a Client connected to our Kernel
    #--------------------------------------------------------------------------

    def client(self, **kwargs):
        """Create a client configured to connect to our kernel"""
        if self.client_factory is None:
            self.client_factory = import_item(self.client_class)

        kw = {}
        kw.update(self.get_connection_info())
        kw.update(
            dict(
                connection_file=self.connection_file,
                session=self.session,
                parent=self,
            ))

        # add kwargs last, for manual overrides
        kw.update(kwargs)
        return self.client_factory(**kw)

    #--------------------------------------------------------------------------
    # Kernel management
    #--------------------------------------------------------------------------

    def format_kernel_cmd(self, extra_arguments=None):
        """replace templated args (e.g. {connection_file})"""
        extra_arguments = extra_arguments or []
        if self.kernel_cmd:
            cmd = self.kernel_cmd + extra_arguments
        else:
            cmd = self.kernel_spec.argv + extra_arguments

        ns = dict(connection_file=self.connection_file)
        ns.update(self._launch_args)

        pat = re.compile(r'\{([A-Za-z0-9_]+)\}')

        def from_ns(match):
            """Get the key out of ns if it's there, otherwise no change."""
            return ns.get(match.group(1), match.group())

        return [pat.sub(from_ns, arg) for arg in cmd]

    def _launch_kernel(self, kernel_cmd, **kw):
        """actually launch the kernel

        override in a subclass to launch kernel subprocesses differently
        """
        return launch_kernel(kernel_cmd, **kw)

    # Control socket used for polite kernel shutdown

    def _connect_control_socket(self):
        if self._control_socket is None:
            self._control_socket = self.connect_control()
            self._control_socket.linger = 100

    def _close_control_socket(self):
        if self._control_socket is None:
            return
        self._control_socket.close()
        self._control_socket = None

    def start_kernel(self, **kw):
        """Starts a kernel on this host in a separate process.

        If random ports (port=0) are being used, this method must be called
        before the channels are created.

        Parameters
        ----------
        **kw : optional
             keyword arguments that are passed down to build the kernel_cmd
             and to launch the kernel (e.g. Popen kwargs).
        """
        if self.transport == 'tcp' and not is_local_ip(self.ip):
            raise RuntimeError(
                "Can only launch a kernel on a local interface. "
                "Make sure that the '*_address' attributes are "
                "configured properly. "
                "Currently valid addresses are: %s" % local_ips())

        # write connection file / get default ports
        self.write_connection_file()

        # save kwargs for use in restart
        self._launch_args = kw.copy()
        # build the Popen cmd
        extra_arguments = kw.pop('extra_arguments', [])
        kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
        if self.kernel_cmd:
            # If kernel_cmd has been set manually, don't refer to a kernel spec
            env = os.environ
        else:
            # Environment variables from kernel spec are added to os.environ
            env = os.environ.copy()
            env.update(self.kernel_spec.env or {})
        # launch the kernel subprocess
        self.kernel = self._launch_kernel(kernel_cmd, env=env, **kw)
        self.start_restarter()
        self._connect_control_socket()

    def request_shutdown(self, restart=False):
        """Send a shutdown request via control channel
        
        On Windows, this just kills kernels instead, because the shutdown
        messages don't work.
        """
        content = dict(restart=restart)
        msg = self.session.msg("shutdown_request", content=content)
        self.session.send(self._control_socket, msg)

    def finish_shutdown(self, waittime=1, pollinterval=0.1):
        """Wait for kernel shutdown, then kill process if it doesn't shutdown.
        
        This does not send shutdown requests - use :meth:`request_shutdown`
        first.
        """
        for i in range(int(waittime / pollinterval)):
            if self.is_alive():
                time.sleep(pollinterval)
            else:
                break
        else:
            # OK, we've waited long enough.
            if self.has_kernel:
                self._kill_kernel()

    def cleanup(self, connection_file=True):
        """Clean up resources when the kernel is shut down"""
        if connection_file:
            self.cleanup_connection_file()

        self.cleanup_ipc_files()
        self._close_control_socket()

    def shutdown_kernel(self, now=False, restart=False):
        """Attempts to the stop the kernel process cleanly.

        This attempts to shutdown the kernels cleanly by:

        1. Sending it a shutdown message over the shell channel.
        2. If that fails, the kernel is shutdown forcibly by sending it
           a signal.

        Parameters
        ----------
        now : bool
            Should the kernel be forcibly killed *now*. This skips the
            first, nice shutdown attempt.
        restart : bool
            Will this kernel be restarted after it is shutdown. When this
            is True, connection files will not be cleaned up.
        """
        # Stop monitoring for restarting while we shutdown.
        self.stop_restarter()

        if now:
            self._kill_kernel()
        else:
            self.request_shutdown(restart=restart)
            # Don't send any additional kernel kill messages immediately, to give
            # the kernel a chance to properly execute shutdown actions. Wait for at
            # most 1s, checking every 0.1s.
            self.finish_shutdown()

        self.cleanup(connection_file=not restart)

    def restart_kernel(self, now=False, **kw):
        """Restarts a kernel with the arguments that were used to launch it.

        If the old kernel was launched with random ports, the same ports will be
        used for the new kernel. The same connection file is used again.

        Parameters
        ----------
        now : bool, optional
            If True, the kernel is forcefully restarted *immediately*, without
            having a chance to do any cleanup action.  Otherwise the kernel is
            given 1s to clean up before a forceful restart is issued.

            In all cases the kernel is restarted, the only difference is whether
            it is given a chance to perform a clean shutdown or not.

        **kw : optional
            Any options specified here will overwrite those used to launch the
            kernel.
        """
        if self._launch_args is None:
            raise RuntimeError("Cannot restart the kernel. "
                               "No previous call to 'start_kernel'.")
        else:
            # Stop currently running kernel.
            self.shutdown_kernel(now=now, restart=True)

            # Start new kernel.
            self._launch_args.update(kw)
            self.start_kernel(**self._launch_args)

    @property
    def has_kernel(self):
        """Has a kernel been started that we are managing."""
        return self.kernel is not None

    def _kill_kernel(self):
        """Kill the running kernel.

        This is a private method, callers should use shutdown_kernel(now=True).
        """
        if self.has_kernel:

            # Signal the kernel to terminate (sends SIGKILL on Unix and calls
            # TerminateProcess() on Win32).
            try:
                self.kernel.kill()
            except OSError as e:
                # In Windows, we will get an Access Denied error if the process
                # has already terminated. Ignore it.
                if sys.platform == 'win32':
                    if e.winerror != 5:
                        raise
                # On Unix, we may get an ESRCH error if the process has already
                # terminated. Ignore it.
                else:
                    from errno import ESRCH
                    if e.errno != ESRCH:
                        raise

            # Block until the kernel terminates.
            self.kernel.wait()
            self.kernel = None
        else:
            raise RuntimeError("Cannot kill kernel. No kernel is running!")

    def interrupt_kernel(self):
        """Interrupts the kernel by sending it a signal.

        Unlike ``signal_kernel``, this operation is well supported on all
        platforms.
        """
        if self.has_kernel:
            if sys.platform == 'win32':
                from .zmq.parentpoller import ParentPollerWindows as Poller
                Poller.send_interrupt(self.kernel.win32_interrupt_event)
            else:
                self.kernel.send_signal(signal.SIGINT)
        else:
            raise RuntimeError(
                "Cannot interrupt kernel. No kernel is running!")

    def signal_kernel(self, signum):
        """Sends a signal to the kernel.

        Note that since only SIGTERM is supported on Windows, this function is
        only useful on Unix systems.
        """
        if self.has_kernel:
            self.kernel.send_signal(signum)
        else:
            raise RuntimeError("Cannot signal kernel. No kernel is running!")

    def is_alive(self):
        """Is the kernel process still running?"""
        if self.has_kernel:
            if self.kernel.poll() is None:
                return True
            else:
                return False
        else:
            # we don't have a kernel
            return False
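
The placeholder substitution performed by format_kernel_cmd above can be tried on its own: known {names} are filled in from a namespace dict, and unknown placeholders are left exactly as written so the kernel can handle them itself. The function name and the argv below are hypothetical:

import re

def fill_argv(argv, ns):
    """Substitute {name} placeholders in a command line from a namespace,
    leaving unknown placeholders untouched."""
    pat = re.compile(r'\{([A-Za-z0-9_]+)\}')

    def from_ns(match):
        # Get the key out of ns if it's there, otherwise no change.
        return ns.get(match.group(1), match.group())

    return [pat.sub(from_ns, arg) for arg in argv]

argv = ['python', '-m', 'mykernel', '-f', '{connection_file}', '--style={unknown}']
print(fill_argv(argv, {'connection_file': '/tmp/kernel-1.json'}))
# ['python', '-m', 'mykernel', '-f', '/tmp/kernel-1.json', '--style={unknown}']
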
Example no. 22
class IPythonWidget(FrontendWidget):
    """ A FrontendWidget for an IPython kernel.
    """

    # If set, the 'custom_edit_requested(str, int)' signal will be emitted when
    # an editor is needed for a file. This overrides 'editor' and 'editor_line'
    # settings.
    custom_edit = Bool(False)
    custom_edit_requested = QtCore.Signal(object, object)

    editor = Unicode(default_editor,
                     config=True,
                     help="""
        A command for invoking a system text editor. If the string contains a
        {filename} format specifier, it will be used. Otherwise, the filename
        will be appended to the end of the command.
        """)

    editor_line = Unicode(config=True,
                          help="""
        The editor command to use when a specific line number is requested. The
        string should contain two format specifiers: {line} and {filename}. If
        this parameter is not specified, the line number option to the %edit
        magic will be ignored.
        """)

    style_sheet = Unicode(config=True,
                          help="""
        A CSS stylesheet. The stylesheet can contain classes for:
            1. Qt: QPlainTextEdit, QFrame, QWidget, etc
            2. Pygments: .c, .k, .o, etc. (see PygmentsHighlighter)
            3. IPython: .error, .in-prompt, .out-prompt, etc
        """)

    syntax_style = Unicode(config=True,
                           help="""
        If not empty, use this Pygments style for syntax highlighting.
        Otherwise, the style sheet is queried for Pygments style
        information.
        """)

    # Prompts.
    in_prompt = Unicode(default_in_prompt, config=True)
    out_prompt = Unicode(default_out_prompt, config=True)
    input_sep = Unicode(default_input_sep, config=True)
    output_sep = Unicode(default_output_sep, config=True)
    output_sep2 = Unicode(default_output_sep2, config=True)

    # FrontendWidget protected class variables.
    _input_splitter_class = IPythonInputSplitter
    _transform_prompt = staticmethod(transform_ipy_prompt)

    # IPythonWidget protected class variables.
    _PromptBlock = namedtuple('_PromptBlock', ['block', 'length', 'number'])
    _payload_source_edit = zmq_shell_source + '.edit_magic'
    _payload_source_exit = zmq_shell_source + '.ask_exit'
    _payload_source_next_input = zmq_shell_source + '.set_next_input'
    _payload_source_page = 'IPython.zmq.page.page'
    _retrying_history_request = False

    #---------------------------------------------------------------------------
    # 'object' interface
    #---------------------------------------------------------------------------

    def __init__(self, *args, **kw):
        super(IPythonWidget, self).__init__(*args, **kw)

        # IPythonWidget protected variables.
        self._payload_handlers = {
            self._payload_source_edit: self._handle_payload_edit,
            self._payload_source_exit: self._handle_payload_exit,
            self._payload_source_page: self._handle_payload_page,
            self._payload_source_next_input: self._handle_payload_next_input
        }
        self._previous_prompt_obj = None
        self._keep_kernel_on_exit = None

        # Initialize widget styling.
        if self.style_sheet:
            self._style_sheet_changed()
            self._syntax_style_changed()
        else:
            self.set_default_style()

    #---------------------------------------------------------------------------
    # 'BaseFrontendMixin' abstract interface
    #---------------------------------------------------------------------------

    def _handle_complete_reply(self, rep):
        """ Reimplemented to support IPython's improved completion machinery.
        """
        self.log.debug("complete: %s", rep.get('content', ''))
        cursor = self._get_cursor()
        info = self._request_info.get('complete')
        if info and info.id == rep['parent_header']['msg_id'] and \
                info.pos == cursor.position():
            matches = rep['content']['matches']
            text = rep['content']['matched_text']
            offset = len(text)

            # Clean up matches with period and path separators if the matched
            # text has not been transformed. This is done by truncating all
            # but the last component and then suitably decreasing the offset
            # between the current cursor position and the start of completion.
            if len(matches) > 1 and matches[0][:offset] == text:
                parts = re.split(r'[./\\]', text)
                sep_count = len(parts) - 1
                if sep_count:
                    chop_length = sum(map(len, parts[:sep_count])) + sep_count
                    matches = [match[chop_length:] for match in matches]
                    offset -= chop_length

            # Move the cursor to the start of the match and complete.
            cursor.movePosition(QtGui.QTextCursor.Left, n=offset)
            self._complete_with_items(cursor, matches)

    def _handle_execute_reply(self, msg):
        """ Reimplemented to support prompt requests.
        """
        msg_id = msg['parent_header'].get('msg_id')
        info = self._request_info['execute'].get(msg_id)
        if info and info.kind == 'prompt':
            number = msg['content']['execution_count'] + 1
            self._show_interpreter_prompt(number)
            self._request_info['execute'].pop(msg_id)
        else:
            super(IPythonWidget, self)._handle_execute_reply(msg)

    def _handle_history_reply(self, msg):
        """ Implemented to handle history tail replies, which are only supported
            by the IPython kernel.
        """
        self.log.debug("history: %s", msg.get('content', ''))
        content = msg['content']
        if 'history' not in content:
            self.log.error("History request failed: %r" % content)
            if content.get('status', '') == 'aborted' and \
                                            not self._retrying_history_request:
                # a *different* action caused this request to be aborted, so
                # we should try again.
                self.log.error("Retrying aborted history request")
                # prevent multiple retries of aborted requests:
                self._retrying_history_request = True
                # wait out the kernel's queue flush, which is currently timed at 0.1s
                time.sleep(0.25)
                self.kernel_manager.shell_channel.history(
                    hist_access_type='tail', n=1000)
            else:
                self._retrying_history_request = False
            return
        # reset retry flag
        self._retrying_history_request = False
        history_items = content['history']
        items = []
        last_cell = u""
        for _, _, cell in history_items:
            cell = cell.rstrip()
            if cell != last_cell:
                items.append(cell)
                last_cell = cell
        self._set_history(items)

    def _handle_pyout(self, msg):
        """ Reimplemented for IPython-style "display hook".
        """
        self.log.debug("pyout: %s", msg.get('content', ''))
        if not self._hidden and self._is_from_this_session(msg):
            content = msg['content']
            prompt_number = content['execution_count']
            data = content['data']
            if 'text/html' in data:
                self._append_plain_text(self.output_sep, True)
                self._append_html(self._make_out_prompt(prompt_number), True)
                html = data['text/html']
                self._append_plain_text('\n', True)
                self._append_html(html + self.output_sep2, True)
            elif 'text/plain' in data:
                self._append_plain_text(self.output_sep, True)
                self._append_html(self._make_out_prompt(prompt_number), True)
                text = data['text/plain']
                # If the repr is multiline, make sure we start on a new line,
                # so that its lines are aligned.
                if "\n" in text and not self.output_sep.endswith("\n"):
                    self._append_plain_text('\n', True)
                self._append_plain_text(text + self.output_sep2, True)

    def _handle_display_data(self, msg):
        """ The base handler for the ``display_data`` message.
        """
        self.log.debug("display: %s", msg.get('content', ''))
        # For now, we don't display data from other frontends, but we
        # eventually will as this allows all frontends to monitor the display
        # data. But we need to figure out how to handle this in the GUI.
        if not self._hidden and self._is_from_this_session(msg):
            source = msg['content']['source']
            data = msg['content']['data']
            metadata = msg['content']['metadata']
            # In the regular IPythonWidget, we simply print the plain text
            # representation.
            if 'text/html' in data:
                html = data['text/html']
                self._append_html(html, True)
            elif 'text/plain' in data:
                text = data['text/plain']
                self._append_plain_text(text, True)
            # This newline seems to be needed for text and html output.
            self._append_plain_text(u'\n', True)

    def _started_channels(self):
        """ Reimplemented to make a history request.
        """
        super(IPythonWidget, self)._started_channels()
        self.kernel_manager.shell_channel.history(hist_access_type='tail',
                                                  n=1000)

    #---------------------------------------------------------------------------
    # 'ConsoleWidget' public interface
    #---------------------------------------------------------------------------

    #---------------------------------------------------------------------------
    # 'FrontendWidget' public interface
    #---------------------------------------------------------------------------

    def execute_file(self, path, hidden=False):
        """ Reimplemented to use the 'run' magic.
        """
        # Use forward slashes on Windows to avoid escaping each separator.
        if sys.platform == 'win32':
            path = os.path.normpath(path).replace('\\', '/')

        # Perhaps we should not be using %run directly, but while we
        # are, it is necessary to quote or escape filenames containing spaces
        # or quotes.

        # In earlier code here, to minimize escaping, we sometimes quoted the
        # filename with single quotes. But to do this, this code must be
        # platform-aware, because run uses shlex rather than python string
        # parsing, so that:
        # * In Win: single quotes can be used in the filename without quoting,
        #   and we cannot use single quotes to quote the filename.
        # * In *nix: we can escape double quotes in a double quoted filename,
        #   but can't escape single quotes in a single quoted filename.

        # So to keep this code non-platform-specific and simple, we now only
        # use double quotes to quote filenames, and escape when needed:
        if ' ' in path or "'" in path or '"' in path:
            path = '"%s"' % path.replace('"', '\\"')
        self.execute('%%run %s' % path, hidden=hidden)

    #---------------------------------------------------------------------------
    # 'FrontendWidget' protected interface
    #---------------------------------------------------------------------------

    def _complete(self):
        """ Reimplemented to support IPython's improved completion machinery.
        """
        # We let the kernel split the input line, so we *always* send an empty
        # text field. Readline-based frontends do get a real text field which
        # they can use.
        text = ''

        # Send the completion request to the kernel
        msg_id = self.kernel_manager.shell_channel.complete(
            text,  # text
            self._get_input_buffer_cursor_line(),  # line
            self._get_input_buffer_cursor_column(),  # cursor_pos
            self.input_buffer)  # block
        pos = self._get_cursor().position()
        info = self._CompletionRequest(msg_id, pos)
        self._request_info['complete'] = info

    def _process_execute_error(self, msg):
        """ Reimplemented for IPython-style traceback formatting.
        """
        content = msg['content']
        traceback = '\n'.join(content['traceback']) + '\n'
        if False:
            # FIXME: For now, tracebacks come as plain text, so we can't use
            # the html renderer yet.  Once we refactor ultratb to produce
            # properly styled tracebacks, this branch should be the default
            traceback = traceback.replace(' ', '&nbsp;')
            traceback = traceback.replace('\n', '<br/>')

            ename = content['ename']
            ename_styled = '<span class="error">%s</span>' % ename
            traceback = traceback.replace(ename, ename_styled)

            self._append_html(traceback)
        else:
            # This is the fallback for now, using plain text with ansi escapes
            self._append_plain_text(traceback)

    def _process_execute_payload(self, item):
        """ Reimplemented to dispatch payloads to handler methods.
        """
        handler = self._payload_handlers.get(item['source'])
        if handler is None:
            # We have no handler for this type of payload, simply ignore it
            return False
        else:
            handler(item)
            return True

    def _show_interpreter_prompt(self, number=None):
        """ Reimplemented for IPython-style prompts.
        """
        # If a number was not specified, make a prompt number request.
        if number is None:
            msg_id = self.kernel_manager.shell_channel.execute('', silent=True)
            info = self._ExecutionRequest(msg_id, 'prompt')
            self._request_info['execute'][msg_id] = info
            return

        # Show a new prompt and save information about it so that it can be
        # updated later if the prompt number turns out to be wrong.
        self._prompt_sep = self.input_sep
        self._show_prompt(self._make_in_prompt(number), html=True)
        block = self._control.document().lastBlock()
        length = len(self._prompt)
        self._previous_prompt_obj = self._PromptBlock(block, length, number)

        # Update continuation prompt to reflect (possibly) new prompt length.
        self._set_continuation_prompt(self._make_continuation_prompt(
            self._prompt),
                                      html=True)

    def _show_interpreter_prompt_for_reply(self, msg):
        """ Reimplemented for IPython-style prompts.
        """
        # Update the old prompt number if necessary.
        content = msg['content']
        # abort replies do not have any keys:
        if content['status'] == 'aborted':
            if self._previous_prompt_obj:
                previous_prompt_number = self._previous_prompt_obj.number
            else:
                previous_prompt_number = 0
        else:
            previous_prompt_number = content['execution_count']
        if self._previous_prompt_obj and \
                self._previous_prompt_obj.number != previous_prompt_number:
            block = self._previous_prompt_obj.block

            # Make sure the prompt block has not been erased.
            if block.isValid() and block.text():

                # Remove the old prompt and insert a new prompt.
                cursor = QtGui.QTextCursor(block)
                cursor.movePosition(QtGui.QTextCursor.Right,
                                    QtGui.QTextCursor.KeepAnchor,
                                    self._previous_prompt_obj.length)
                prompt = self._make_in_prompt(previous_prompt_number)
                self._prompt = self._insert_html_fetching_plain_text(
                    cursor, prompt)

                # When the HTML is inserted, Qt blows away the syntax
                # highlighting for the line, so we need to rehighlight it.
                self._highlighter.rehighlightBlock(cursor.block())

            self._previous_prompt_obj = None

        # Show a new prompt with the kernel's estimated prompt number.
        self._show_interpreter_prompt(previous_prompt_number + 1)

    #---------------------------------------------------------------------------
    # 'IPythonWidget' interface
    #---------------------------------------------------------------------------

    def set_default_style(self, colors='lightbg'):
        """ Sets the widget style to the class defaults.

        Parameters
        ----------
        colors : str, optional (default lightbg)
            Whether to use the default IPython light background or dark
            background or B&W style.
        """
        colors = colors.lower()
        if colors == 'lightbg':
            self.style_sheet = styles.default_light_style_sheet
            self.syntax_style = styles.default_light_syntax_style
        elif colors == 'linux':
            self.style_sheet = styles.default_dark_style_sheet
            self.syntax_style = styles.default_dark_syntax_style
        elif colors == 'nocolor':
            self.style_sheet = styles.default_bw_style_sheet
            self.syntax_style = styles.default_bw_syntax_style
        else:
            raise KeyError("No such color scheme: %s" % colors)

    #---------------------------------------------------------------------------
    # 'IPythonWidget' protected interface
    #---------------------------------------------------------------------------

    def _edit(self, filename, line=None):
        """ Opens a Python script for editing.

        Parameters
        ----------
        filename : str
            A path to a local system file.

        line : int, optional
            A line of interest in the file.
        """
        if self.custom_edit:
            self.custom_edit_requested.emit(filename, line)
        elif not self.editor:
            self._append_plain_text(
                'No default editor available.\n'
                'Specify a GUI text editor in the `IPythonWidget.editor` '
                'configurable to enable the %edit magic')
        else:
            try:
                filename = '"%s"' % filename
                if line and self.editor_line:
                    command = self.editor_line.format(filename=filename,
                                                      line=line)
                else:
                    try:
                        command = self.editor.format()
                    except KeyError:
                        command = self.editor.format(filename=filename)
                    else:
                        command += ' ' + filename
            except KeyError:
                self._append_plain_text('Invalid editor command.\n')
            else:
                try:
                    Popen(command, shell=True)
                except OSError:
                    msg = 'Opening editor with command "%s" failed.\n'
                    self._append_plain_text(msg % command)

    def _make_in_prompt(self, number):
        """ Given a prompt number, returns an HTML In prompt.
        """
        try:
            body = self.in_prompt % number
        except TypeError:
            # allow in_prompt to leave out number, e.g. '>>> '
            body = self.in_prompt
        return '<span class="in-prompt">%s</span>' % body

    def _make_continuation_prompt(self, prompt):
        """ Given a plain text version of an In prompt, returns an HTML
            continuation prompt.
        """
        end_chars = '...: '
        space_count = len(prompt.lstrip('\n')) - len(end_chars)
        body = '&nbsp;' * space_count + end_chars
        return '<span class="in-prompt">%s</span>' % body

    def _make_out_prompt(self, number):
        """ Given a prompt number, returns an HTML Out prompt.
        """
        body = self.out_prompt % number
        return '<span class="out-prompt">%s</span>' % body

    #------ Payload handlers --------------------------------------------------

    # Payload handlers with a generic interface: each takes the opaque payload
    # dict, unpacks it and calls the underlying functions with the necessary
    # arguments.

    def _handle_payload_edit(self, item):
        self._edit(item['filename'], item['line_number'])

    def _handle_payload_exit(self, item):
        self._keep_kernel_on_exit = item['keepkernel']
        self.exit_requested.emit(self)

    def _handle_payload_next_input(self, item):
        self.input_buffer = dedent(item['text'].rstrip())

    def _handle_payload_page(self, item):
        # Since the plain text widget supports only a very small subset of HTML
        # and we have no control over the HTML source, we only page HTML
        # payloads in the rich text widget.
        if item['html'] and self.kind == 'rich':
            self._page(item['html'], html=True)
        else:
            self._page(item['text'], html=False)

    #------ Trait change handlers --------------------------------------------

    def _style_sheet_changed(self):
        """ Set the style sheets of the underlying widgets.
        """
        self.setStyleSheet(self.style_sheet)
        self._control.document().setDefaultStyleSheet(self.style_sheet)
        if self._page_control:
            self._page_control.document().setDefaultStyleSheet(
                self.style_sheet)

        bg_color = self._control.palette().window().color()
        self._ansi_processor.set_background_color(bg_color)

    def _syntax_style_changed(self):
        """ Set the style for the syntax highlighter.
        """
        if self._highlighter is None:
            # ignore premature calls
            return
        if self.syntax_style:
            self._highlighter.set_style(self.syntax_style)
        else:
            self._highlighter.set_style_sheet(self.style_sheet)

    #------ Trait default initializers -----------------------------------------

    def _banner_default(self):
        from IPython.core.usage import default_gui_banner
        return default_gui_banner
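
The match-trimming step in _handle_complete_reply above is easy to exercise in isolation: when the matched text contains path or attribute separators, everything but the last component is chopped off the front of each match and the cursor offset shrinks by the same amount. The function name here is made up:

import re

def trim_matches(text, matches):
    """Trim the leading, already-typed components from completion matches."""
    offset = len(text)
    if len(matches) > 1 and matches[0][:offset] == text:
        parts = re.split(r'[./\\]', text)
        sep_count = len(parts) - 1
        if sep_count:
            chop = sum(map(len, parts[:sep_count])) + sep_count
            matches = [m[chop:] for m in matches]
            offset -= chop
    return matches, offset

print(trim_matches('os.pa', ['os.path', 'os.pardir']))
# (['path', 'pardir'], 2)
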
Example no. 23
class EngineFactory(RegistrationFactory):
    """IPython engine"""

    # configurables:
    out_stream_factory = Type('IPython.kernel.zmq.iostream.OutStream',
                              config=True,
                              help="""The OutStream for handling stdout/err.
        Typically 'IPython.kernel.zmq.iostream.OutStream'""")
    display_hook_factory = Type(
        'IPython.kernel.zmq.displayhook.ZMQDisplayHook',
        config=True,
        help="""The class for handling displayhook.
        Typically 'IPython.kernel.zmq.displayhook.ZMQDisplayHook'""")
    location = Unicode(
        config=True,
        help="""The location (an IP address) of the controller.  This is
        used for disambiguating URLs, to determine whether
        loopback should be used to connect or the public address.""")
    timeout = Float(
        5.0,
        config=True,
        help="""The time (in seconds) to wait for the Controller to respond
        to registration requests before giving up.""")
    max_heartbeat_misses = Integer(
        50,
        config=True,
        help="""The maximum number of times a check for the heartbeat ping of a 
        controller can be missed before shutting down the engine.
        
        If set to 0, the check is disabled.""")
    sshserver = Unicode(
        config=True,
        help=
        """The SSH server to use for tunneling connections to the Controller."""
    )
    sshkey = Unicode(
        config=True,
        help=
        """The SSH private key file to use when tunneling connections to the Controller."""
    )
    paramiko = Bool(
        sys.platform == 'win32',
        config=True,
        help="""Whether to use paramiko instead of openssh for tunnels.""")

    @property
    def tunnel_mod(self):
        from zmq.ssh import tunnel
        return tunnel

    # not configurable:
    connection_info = Dict()
    user_ns = Dict()
    id = Integer(allow_none=True)
    registrar = Instance('zmq.eventloop.zmqstream.ZMQStream')
    kernel = Instance(Kernel)
    hb_check_period = Integer()

    # States for the heartbeat monitoring
    # Initial values for monitored and pinged must satisfy "monitored > pinged == False" so that
    # during the first check no "missed" ping is reported. Must be floats for Python 3 compatibility.
    _hb_last_pinged = 0.0
    _hb_last_monitored = 0.0
    _hb_missed_beats = 0
    # The zmq Stream which receives the pings from the Heart
    _hb_listener = None

    bident = CBytes()
    ident = Unicode()

    def _ident_changed(self, name, old, new):
        self.bident = cast_bytes(new)

    using_ssh = Bool(False)

    def __init__(self, **kwargs):
        super(EngineFactory, self).__init__(**kwargs)
        self.ident = self.session.session

    def init_connector(self):
        """construct connection function, which handles tunnels."""
        self.using_ssh = bool(self.sshkey or self.sshserver)

        if self.sshkey and not self.sshserver:
            # We are using ssh directly to the controller, tunneling localhost to localhost
            self.sshserver = self.url.split('://')[1].split(':')[0]

        if self.using_ssh:
            if self.tunnel_mod.try_passwordless_ssh(self.sshserver,
                                                    self.sshkey,
                                                    self.paramiko):
                password = False
            else:
                password = getpass("SSH Password for %s: " % self.sshserver)
        else:
            password = False

        def connect(s, url):
            url = disambiguate_url(url, self.location)
            if self.using_ssh:
                self.log.debug("Tunneling connection to %s via %s", url,
                               self.sshserver)
                return self.tunnel_mod.tunnel_connection(
                    s,
                    url,
                    self.sshserver,
                    keyfile=self.sshkey,
                    paramiko=self.paramiko,
                    password=password,
                )
            else:
                return s.connect(url)

        def maybe_tunnel(url):
            """like connect, but don't complete the connection (for use by heartbeat)"""
            url = disambiguate_url(url, self.location)
            if self.using_ssh:
                self.log.debug("Tunneling connection to %s via %s", url,
                               self.sshserver)
                url, tunnelobj = self.tunnel_mod.open_tunnel(
                    url,
                    self.sshserver,
                    keyfile=self.sshkey,
                    paramiko=self.paramiko,
                    password=password,
                )
            return str(url)

        return connect, maybe_tunnel

    def register(self):
        """send the registration_request"""

        self.log.info("Registering with controller at %s" % self.url)
        ctx = self.context
        connect, maybe_tunnel = self.init_connector()
        reg = ctx.socket(zmq.DEALER)
        reg.setsockopt(zmq.IDENTITY, self.bident)
        connect(reg, self.url)
        self.registrar = zmqstream.ZMQStream(reg, self.loop)

        content = dict(uuid=self.ident)
        self.registrar.on_recv(
            lambda msg: self.complete_registration(msg, connect, maybe_tunnel))
        # print (self.session.key)
        self.session.send(self.registrar,
                          "registration_request",
                          content=content)

    def _report_ping(self, msg):
        """Callback for when the heartmonitor.Heart receives a ping"""
        #self.log.debug("Received a ping: %s", msg)
        self._hb_last_pinged = time.time()

    def complete_registration(self, msg, connect, maybe_tunnel):
        # print msg
        self.loop.remove_timeout(self._abort_timeout)
        ctx = self.context
        loop = self.loop
        identity = self.bident
        idents, msg = self.session.feed_identities(msg)
        msg = self.session.unserialize(msg)
        content = msg['content']
        info = self.connection_info

        def url(key):
            """get zmq url for given channel"""
            return str(info["interface"] + ":%i" % info[key])

        if content['status'] == 'ok':
            self.id = int(content['id'])

            # launch heartbeat
            # possibly forward hb ports with tunnels
            hb_ping = maybe_tunnel(url('hb_ping'))
            hb_pong = maybe_tunnel(url('hb_pong'))

            hb_monitor = None
            if self.max_heartbeat_misses > 0:
                # Add a monitor socket which will record the last time a ping was seen
                mon = self.context.socket(zmq.SUB)
                mport = mon.bind_to_random_port('tcp://%s' % localhost())
                mon.setsockopt(zmq.SUBSCRIBE, b"")
                self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
                self._hb_listener.on_recv(self._report_ping)

                hb_monitor = "tcp://%s:%i" % (localhost(), mport)

            heart = Heart(hb_ping, hb_pong, hb_monitor, heart_id=identity)
            heart.start()

            # create Shell Connections (MUX, Task, etc.):
            shell_addrs = url('mux'), url('task')

            # Use only one shell stream for mux and tasks
            stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
            stream.setsockopt(zmq.IDENTITY, identity)
            shell_streams = [stream]
            for addr in shell_addrs:
                connect(stream, addr)

            # control stream:
            control_addr = url('control')
            control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
            control_stream.setsockopt(zmq.IDENTITY, identity)
            connect(control_stream, control_addr)

            # create iopub stream:
            iopub_addr = url('iopub')
            iopub_socket = ctx.socket(zmq.PUB)
            iopub_socket.setsockopt(zmq.IDENTITY, identity)
            connect(iopub_socket, iopub_addr)

            # disable history:
            self.config.HistoryManager.hist_file = ':memory:'

            # Redirect input streams and set a display hook.
            if self.out_stream_factory:
                sys.stdout = self.out_stream_factory(self.session,
                                                     iopub_socket, u'stdout')
                sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
                sys.stderr = self.out_stream_factory(self.session,
                                                     iopub_socket, u'stderr')
                sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
            if self.display_hook_factory:
                sys.displayhook = self.display_hook_factory(
                    self.session, iopub_socket)
                sys.displayhook.topic = cast_bytes('engine.%i.execute_result' %
                                                   self.id)

            self.kernel = Kernel(parent=self,
                                 int_id=self.id,
                                 ident=self.ident,
                                 session=self.session,
                                 control_stream=control_stream,
                                 shell_streams=shell_streams,
                                 iopub_socket=iopub_socket,
                                 loop=loop,
                                 user_ns=self.user_ns,
                                 log=self.log)

            self.kernel.shell.display_pub.topic = cast_bytes(
                'engine.%i.displaypub' % self.id)

            # periodically check the heartbeat pings of the controller
            # Should be started here and not in "start()" so that the right period can be taken
            # from the hub's HeartBeatMonitor.period
            if self.max_heartbeat_misses > 0:
                # Use a slightly bigger check period than the hub's signal period so we don't warn unnecessarily
                self.hb_check_period = int(content['hb_period']) + 10
                self.log.info(
                    "Starting to monitor the heartbeat signal from the hub every %i ms.",
                    self.hb_check_period)
                self._hb_reporter = ioloop.PeriodicCallback(
                    self._hb_monitor, self.hb_check_period, self.loop)
                self._hb_reporter.start()
            else:
                self.log.info(
                    "Monitoring of the heartbeat signal from the hub is not enabled."
                )

            # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
            app = IPKernelApp(parent=self,
                              shell=self.kernel.shell,
                              kernel=self.kernel,
                              log=self.log)
            app.init_profile_dir()
            app.init_code()

            self.kernel.start()
        else:
            self.log.fatal("Registration Failed: %s" % msg)
            raise Exception("Registration Failed: %s" % msg)

        self.log.info("Completed registration with id %i" % self.id)

    def abort(self):
        self.log.fatal("Registration timed out after %.1f seconds" %
                       self.timeout)
        if self.url.startswith('127.'):
            self.log.fatal("""
            If the controller and engines are not on the same machine,
            you will have to instruct the controller to listen on an external IP (in ipcontroller_config.py):
                c.HubFactory.ip='*' # for all interfaces, internal and external
                c.HubFactory.ip='192.168.1.101' # or any interface that the engines can see
            or tunnel connections via ssh.
            """)
        self.session.send(self.registrar,
                          "unregistration_request",
                          content=dict(id=self.id))
        time.sleep(1)
        sys.exit(255)

    def _hb_monitor(self):
        """Callback to monitor the heartbeat from the controller"""
        self._hb_listener.flush()
        if self._hb_last_monitored > self._hb_last_pinged:
            self._hb_missed_beats += 1
            self.log.warn(
                "No heartbeat in the last %s ms (%s time(s) in a row).",
                self.hb_check_period, self._hb_missed_beats)
        else:
            #self.log.debug("Heartbeat received (after missing %s beats).", self._hb_missed_beats)
            self._hb_missed_beats = 0

        if self._hb_missed_beats >= self.max_heartbeat_misses:
            self.log.fatal(
                "Maximum number of heartbeats misses reached (%s times %s ms), shutting down.",
                self.max_heartbeat_misses, self.hb_check_period)
            self.session.send(self.registrar,
                              "unregistration_request",
                              content=dict(id=self.id))
            self.loop.stop()

        self._hb_last_monitored = time.time()

    def start(self):
        loop = self.loop

        def _start():
            self.register()
            self._abort_timeout = loop.add_timeout(loop.time() + self.timeout,
                                                   self.abort)

        self.loop.add_callback(_start)
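
The heartbeat bookkeeping used by _report_ping and _hb_monitor above boils down to comparing two timestamps on every periodic check: if no ping arrived since the previous check, a miss is counted, and enough consecutive misses trigger shutdown. A minimal, self-contained sketch with hypothetical names:

import time

class HeartbeatWatch:
    """Count consecutive periodic checks that saw no ping from the peer."""

    def __init__(self, max_misses=3):
        self.max_misses = max_misses
        self.last_pinged = 0.0
        self.last_checked = 0.0
        self.missed = 0

    def ping(self):
        # Called whenever a heartbeat message is received.
        self.last_pinged = time.time()

    def check(self):
        """Return True while the peer is still considered alive."""
        if self.last_checked > self.last_pinged:
            self.missed += 1   # no ping since the previous check
        else:
            self.missed = 0    # a ping was seen; reset the counter
        self.last_checked = time.time()
        return self.missed < self.max_misses

w = HeartbeatWatch()
w.ping()
print(w.check())  # True: a ping was seen since the previous check
print(w.check())  # True: one miss counted, still under the threshold
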
Example no. 24
class UnicodeTrait(HasTraits):

    value = Unicode(u'unicode')
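
A quick sketch of how a trait declaration like the one above behaves at runtime. This uses the standalone traitlets package, which may differ from the traitlets implementation these examples were written against (an assumption), and the class name is made up:

from traitlets import HasTraits, Unicode

class Example(HasTraits):
    value = Unicode(u'unicode')   # default value, type-checked on assignment

e = Example()
print(e.value)      # 'unicode' -- the declared default
e.value = u'hello'  # assigning another string is fine
try:
    e.value = 42    # non-string assignment is rejected
except Exception as err:
    print(type(err).__name__)    # TraitError
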
Example no. 25
class HistoryManager(HistoryAccessor):
    """A class to organize all history-related functionality in one place.
    """
    # Public interface

    # An instance of the IPython shell we are attached to
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
    # Lists to hold processed and raw history. These start with a blank entry
    # so that we can index them starting from 1
    input_hist_parsed = List([""])
    input_hist_raw = List([""])
    # A list of directories visited during session
    dir_hist = List()

    def _dir_hist_default(self):
        try:
            return [py3compat.getcwd()]
        except OSError:
            return []

    # A dict of output history, keyed with ints from the shell's
    # execution count.
    output_hist = Dict()
    # The text/plain repr of outputs.
    output_hist_reprs = Dict()

    # The number of the current session in the history database
    session_number = Integer()
    # Should we log output to the database? (default no)
    db_log_output = Bool(False, config=True)
    # Write to database every x commands (higher values save disk access & power)
    #  Values of 1 or less effectively disable caching.
    db_cache_size = Integer(0, config=True)
    # The input and output caches
    db_input_cache = List()
    db_output_cache = List()

    # History saving in separate thread
    save_thread = Instance('IPython.core.history.HistorySavingThread')
    try:  # Event is a function returning an instance of _Event...
        save_flag = Instance(threading._Event)
    except AttributeError:  # ...until Python 3.3, when it's a class.
        save_flag = Instance(threading.Event)

    # Private interface
    # Variables used to store the three last inputs from the user.  On each new
    # history update, we populate the user's namespace with these, shifted as
    # necessary.
    _i00 = Unicode(u'')
    _i = Unicode(u'')
    _ii = Unicode(u'')
    _iii = Unicode(u'')

    # A regex matching all forms of the exit command, so that we don't store
    # them in the history (it's annoying to rewind the first entry and land on
    # an exit call).
    _exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$")

    def __init__(self, shell=None, config=None, **traits):
        """Create a new history manager associated with a shell instance.
        """
        # We need a pointer back to the shell for various tasks.
        super(HistoryManager, self).__init__(shell=shell,
                                             config=config,
                                             **traits)
        self.save_flag = threading.Event()
        self.db_input_cache_lock = threading.Lock()
        self.db_output_cache_lock = threading.Lock()
        if self.enabled and self.hist_file != ':memory:':
            self.save_thread = HistorySavingThread(self)
            self.save_thread.start()

        self.new_session()

    def _get_hist_file_name(self, profile=None):
        """Get default history file name based on the Shell's profile.
        
        The profile parameter is ignored, but must exist for compatibility with
        the parent class."""
        profile_dir = self.shell.profile_dir.location
        return os.path.join(profile_dir, 'history.sqlite')

    @needs_sqlite
    def new_session(self, conn=None):
        """Get a new session number."""
        if conn is None:
            conn = self.db

        with conn:
            cur = conn.execute(
                """INSERT INTO sessions VALUES (NULL, ?, NULL,
                            NULL, "") """, (datetime.datetime.now(), ))
            self.session_number = cur.lastrowid

    def end_session(self):
        """Close the database session, filling in the end time and line count."""
        self.writeout_cache()
        with self.db:
            self.db.execute(
                """UPDATE sessions SET end=?, num_cmds=? WHERE
                            session==?""",
                (datetime.datetime.now(), len(self.input_hist_parsed) - 1,
                 self.session_number))
        self.session_number = 0

    def name_session(self, name):
        """Give the current session a name in the history database."""
        with self.db:
            self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
                            (name, self.session_number))

    def reset(self, new_session=True):
        """Clear the session history, releasing all object references, and
        optionally open a new session."""
        self.output_hist.clear()
        # The directory history can't be completely empty
        self.dir_hist[:] = [py3compat.getcwd()]

        if new_session:
            if self.session_number:
                self.end_session()
            self.input_hist_parsed[:] = [""]
            self.input_hist_raw[:] = [""]
            self.new_session()

    # ------------------------------
    # Methods for retrieving history
    # ------------------------------
    def _get_range_session(self, start=1, stop=None, raw=True, output=False):
        """Get input and output history from the current session. Called by
        get_range, and takes similar parameters."""
        input_hist = self.input_hist_raw if raw else self.input_hist_parsed

        n = len(input_hist)
        if start < 0:
            start += n
        if not stop or (stop > n):
            stop = n
        elif stop < 0:
            stop += n

        for i in range(start, stop):
            if output:
                line = (input_hist[i], self.output_hist_reprs.get(i))
            else:
                line = input_hist[i]
            yield (0, i, line)

    def get_range(self, session=0, start=1, stop=None, raw=True, output=False):
        """Retrieve input by session.
        
        Parameters
        ----------
        session : int
            Session number to retrieve. The current session is 0, and negative
            numbers count back from current session, so -1 is previous session.
        start : int
            First line to retrieve.
        stop : int
            End of line range (excluded from output itself). If None, retrieve
            to the end of the session.
        raw : bool
            If True, return untranslated input
        output : bool
            If True, attempt to include output. This will be 'real' Python
            objects for the current session, or text reprs from previous
            sessions if db_log_output was enabled at the time. Where no output
            is found, None is used.
            
        Returns
        -------
        An iterator over the desired lines. Each line is a 3-tuple, either
        (session, line, input) if output is False, or
        (session, line, (input, output)) if output is True.
        """
        if session <= 0:
            session += self.session_number
        if session == self.session_number:  # Current session
            return self._get_range_session(start, stop, raw, output)
        return super(HistoryManager, self).get_range(session, start, stop, raw,
                                                     output)

    ## ----------------------------
    ## Methods for storing history:
    ## ----------------------------
    def store_inputs(self, line_num, source, source_raw=None):
        """Store source and raw input in history and create input cache
        variables _i*.

        Parameters
        ----------
        line_num : int
          The prompt number of this input.

        source : str
          Python input.

        source_raw : str, optional
          If given, this is the raw input without any IPython transformations
          applied to it.  If not given, ``source`` is used.
        """
        if source_raw is None:
            source_raw = source
        source = source.rstrip('\n')
        source_raw = source_raw.rstrip('\n')

        # do not store exit/quit commands
        if self._exit_re.match(source_raw.strip()):
            return

        self.input_hist_parsed.append(source)
        self.input_hist_raw.append(source_raw)

        with self.db_input_cache_lock:
            self.db_input_cache.append((line_num, source, source_raw))
            # Trigger to flush cache and write to DB.
            if len(self.db_input_cache) >= self.db_cache_size:
                self.save_flag.set()

        # update the auto _i variables
        self._iii = self._ii
        self._ii = self._i
        self._i = self._i00
        self._i00 = source_raw

        # hackish access to user namespace to create _i1,_i2... dynamically
        new_i = '_i%s' % line_num
        to_main = {
            '_i': self._i,
            '_ii': self._ii,
            '_iii': self._iii,
            new_i: self._i00
        }

        if self.shell is not None:
            self.shell.push(to_main, interactive=False)

    def store_output(self, line_num):
        """If database output logging is enabled, this saves all the
        outputs from the indicated prompt number to the database. It's
        called by run_cell after code has been executed.

        Parameters
        ----------
        line_num : int
          The line number from which to save outputs
        """
        if (not self.db_log_output) or (line_num
                                        not in self.output_hist_reprs):
            return
        output = self.output_hist_reprs[line_num]

        with self.db_output_cache_lock:
            self.db_output_cache.append((line_num, output))
        if self.db_cache_size <= 1:
            self.save_flag.set()

    def _writeout_input_cache(self, conn):
        with conn:
            for line in self.db_input_cache:
                conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)",
                             (self.session_number, ) + line)

    def _writeout_output_cache(self, conn):
        with conn:
            for line in self.db_output_cache:
                conn.execute("INSERT INTO output_history VALUES (?, ?, ?)",
                             (self.session_number, ) + line)

    @needs_sqlite
    def writeout_cache(self, conn=None):
        """Write any entries in the cache to the database."""
        if conn is None:
            conn = self.db

        with self.db_input_cache_lock:
            try:
                self._writeout_input_cache(conn)
            except sqlite3.IntegrityError:
                self.new_session(conn)
                print("ERROR! Session/line number was not unique in",
                      "database. History logging moved to new session",
                      self.session_number)
                try:
                    # Try writing to the new session. If this fails, don't
                    # recurse
                    self._writeout_input_cache(conn)
                except sqlite3.IntegrityError:
                    pass
            finally:
                self.db_input_cache = []

        with self.db_output_cache_lock:
            try:
                self._writeout_output_cache(conn)
            except sqlite3.IntegrityError:
                print("!! Session/line number for output was not unique",
                      "in database. Output will not be stored.")
            finally:
                self.db_output_cache = []
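
A minimal usage sketch for the history API above, assuming it runs inside an interactive IPython session where get_ipython() is available (none of the names below are defined in this snippet):

# Illustrative only: the shell owns a HistoryManager instance.
hm = get_ipython().history_manager

# Previous session (-1), raw input plus logged output reprs, yielded as
# 3-tuples (session, line, (input, output)) per the get_range() docstring.
for session, lineno, (source, output) in hm.get_range(session=-1, raw=True, output=True):
    print(session, lineno, source, output)

# Flush any cached input/output rows to the SQLite database.
hm.writeout_cache()
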
Example no. 26
import subprocess

from IPython.utils.tempdir import TemporaryDirectory
from IPython.utils.traitlets import Unicode

from .convertfigures import ConvertFiguresTransformer



INKSCAPE_APP = '/Applications/Inkscape.app/Contents/Resources/bin/inkscape'

class SVG2PDFTransformer(ConvertFiguresTransformer):
    """
    Converts all of the outputs in a notebook from SVG to PDF.
    """

    from_format = Unicode('svg', config=True, help='Format the converter accepts')
    to_format = Unicode('pdf', config=False, help='Format the converter writes')

    command = Unicode(config=True, help="""The command to use for converting SVG to PDF

        This string is a template, which will be formatted with the keys
        to_filename and from_filename.

        The conversion call must read the SVG from {from_filename},
        and write a PDF to {to_filename}.
        """)

    def _command_default(self):
        return self.inkscape + \
               ' --without-gui --export-pdf="{to_filename}" "{from_filename}"'
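
A minimal sketch of how the command template above is meant to be used, assuming an inkscape executable is available on the PATH (the class method that actually formats and runs the command, and the `inkscape` path trait referenced by `_command_default`, are not shown in this snippet):

import subprocess

# Placeholder file names; the template keys match those documented above.
command = 'inkscape --without-gui --export-pdf="{to_filename}" "{from_filename}"'
call = command.format(from_filename='figure.svg', to_filename='figure.pdf')
subprocess.call(call, shell=True)
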
    
Example no. 27
class FrontendWidget(HistoryConsoleWidget, BaseFrontendMixin):
    """ A Qt frontend for a generic Python kernel.
    """

    # The text to show when the kernel is (re)started.
    banner = Unicode()

    # An option and corresponding signal for overriding the default kernel
    # interrupt behavior.
    custom_interrupt = Bool(False)
    custom_interrupt_requested = QtCore.Signal()

    # An option and corresponding signals for overriding the default kernel
    # restart behavior.
    custom_restart = Bool(False)
    custom_restart_kernel_died = QtCore.Signal(float)
    custom_restart_requested = QtCore.Signal()

    # Whether to automatically show calltips on open-parentheses.
    enable_calltips = Bool(
        True,
        config=True,
        help="Whether to draw information calltips on open-parentheses.")

    clear_on_kernel_restart = Bool(
        True,
        config=True,
        help="Whether to clear the console when the kernel is restarted")

    confirm_restart = Bool(
        True,
        config=True,
        help="Whether to ask for user confirmation when restarting kernel")

    # Emitted when a user visible 'execute_request' has been submitted to the
    # kernel from the FrontendWidget. Contains the code to be executed.
    executing = QtCore.Signal(object)

    # Emitted when a user-visible 'execute_reply' has been received from the
    # kernel and processed by the FrontendWidget. Contains the response message.
    executed = QtCore.Signal(object)

    # Emitted when an exit request has been received from the kernel.
    exit_requested = QtCore.Signal(object)

    # Protected class variables.
    _prompt_transformer = IPythonInputSplitter(
        physical_line_transforms=[classic_prompt()],
        logical_line_transforms=[],
        python_line_transforms=[],
    )
    _CallTipRequest = namedtuple('_CallTipRequest', ['id', 'pos'])
    _CompletionRequest = namedtuple('_CompletionRequest', ['id', 'pos'])
    _ExecutionRequest = namedtuple('_ExecutionRequest', ['id', 'kind'])
    _input_splitter_class = InputSplitter
    _local_kernel = False
    _highlighter = Instance(FrontendHighlighter)

    #---------------------------------------------------------------------------
    # 'object' interface
    #---------------------------------------------------------------------------

    def __init__(self, *args, **kw):
        super(FrontendWidget, self).__init__(*args, **kw)
        # FIXME: remove this when PySide min version is updated past 1.0.7
        # forcefully disable calltips if PySide is < 1.0.7, because they crash
        if qt.QT_API == qt.QT_API_PYSIDE:
            import PySide
            if PySide.__version_info__ < (1, 0, 7):
                self.log.warn(
                    "PySide %s < 1.0.7 detected, disabling calltips" %
                    PySide.__version__)
                self.enable_calltips = False

        # FrontendWidget protected variables.
        self._bracket_matcher = BracketMatcher(self._control)
        self._call_tip_widget = CallTipWidget(self._control)
        self._completion_lexer = CompletionLexer(PythonLexer())
        self._copy_raw_action = QtGui.QAction('Copy (Raw Text)', None)
        self._hidden = False
        self._highlighter = FrontendHighlighter(self)
        self._input_splitter = self._input_splitter_class()
        self._kernel_manager = None
        self._kernel_client = None
        self._request_info = {}
        self._request_info['execute'] = {}
        self._callback_dict = {}

        # Configure the ConsoleWidget.
        self.tab_width = 4
        self._set_continuation_prompt('... ')

        # Configure the CallTipWidget.
        self._call_tip_widget.setFont(self.font)
        self.font_changed.connect(self._call_tip_widget.setFont)

        # Configure actions.
        action = self._copy_raw_action
        key = QtCore.Qt.CTRL | QtCore.Qt.SHIFT | QtCore.Qt.Key_C
        action.setEnabled(False)
        action.setShortcut(QtGui.QKeySequence(key))
        action.setShortcutContext(QtCore.Qt.WidgetWithChildrenShortcut)
        action.triggered.connect(self.copy_raw)
        self.copy_available.connect(action.setEnabled)
        self.addAction(action)

        # Connect signal handlers.
        document = self._control.document()
        document.contentsChange.connect(self._document_contents_change)

        # Set flag for whether we are connected via localhost.
        self._local_kernel = kw.get('local_kernel',
                                    FrontendWidget._local_kernel)

    #---------------------------------------------------------------------------
    # 'ConsoleWidget' public interface
    #---------------------------------------------------------------------------

    def copy(self):
        """ Copy the currently selected text to the clipboard, removing prompts.
        """
        if self._page_control is not None and self._page_control.hasFocus():
            self._page_control.copy()
        elif self._control.hasFocus():
            text = self._control.textCursor().selection().toPlainText()
            if text:
                text = self._prompt_transformer.transform_cell(text)
                QtGui.QApplication.clipboard().setText(text)
        else:
            self.log.debug("frontend widget : unknown copy target")

    #---------------------------------------------------------------------------
    # 'ConsoleWidget' abstract interface
    #---------------------------------------------------------------------------

    def _is_complete(self, source, interactive):
        """ Returns whether 'source' can be completely processed and a new
            prompt created. When triggered by an Enter/Return key press,
            'interactive' is True; otherwise, it is False.
        """
        self._input_splitter.reset()
        complete = self._input_splitter.push(source)
        if interactive:
            complete = not self._input_splitter.push_accepts_more()
        return complete

    def _execute(self, source, hidden):
        """ Execute 'source'. If 'hidden', do not show any output.

        See parent class :meth:`execute` docstring for full details.
        """
        msg_id = self.kernel_client.execute(source, hidden)
        self._request_info['execute'][msg_id] = self._ExecutionRequest(
            msg_id, 'user')
        self._hidden = hidden
        if not hidden:
            self.executing.emit(source)

    def _prompt_started_hook(self):
        """ Called immediately after a new prompt is displayed.
        """
        if not self._reading:
            self._highlighter.highlighting_on = True

    def _prompt_finished_hook(self):
        """ Called immediately after a prompt is finished, i.e. when some input
            will be processed and a new prompt displayed.
        """
        # Flush all state from the input splitter so the next round of
        # reading input starts with a clean buffer.
        self._input_splitter.reset()

        if not self._reading:
            self._highlighter.highlighting_on = False

    def _tab_pressed(self):
        """ Called when the tab key is pressed. Returns whether to continue
            processing the event.
        """
        # Perform tab completion if:
        # 1) The cursor is in the input buffer.
        # 2) There is a non-whitespace character before the cursor.
        text = self._get_input_buffer_cursor_line()
        if text is None:
            return False
        complete = bool(text[:self._get_input_buffer_cursor_column()].strip())
        if complete:
            self._complete()
        return not complete

    #---------------------------------------------------------------------------
    # 'ConsoleWidget' protected interface
    #---------------------------------------------------------------------------

    def _context_menu_make(self, pos):
        """ Reimplemented to add an action for raw copy.
        """
        menu = super(FrontendWidget, self)._context_menu_make(pos)
        for before_action in menu.actions():
            if before_action.shortcut().matches(QtGui.QKeySequence.Paste) == \
                    QtGui.QKeySequence.ExactMatch:
                menu.insertAction(before_action, self._copy_raw_action)
                break
        return menu

    def request_interrupt_kernel(self):
        if self._executing:
            self.interrupt_kernel()

    def request_restart_kernel(self):
        message = 'Are you sure you want to restart the kernel?'
        self.restart_kernel(message, now=False)

    def _event_filter_console_keypress(self, event):
        """ Reimplemented for execution interruption and smart backspace.
        """
        key = event.key()
        if self._control_key_down(event.modifiers(), include_command=False):

            if key == QtCore.Qt.Key_C and self._executing:
                self.request_interrupt_kernel()
                return True

            elif key == QtCore.Qt.Key_Period:
                self.request_restart_kernel()
                return True

        elif not event.modifiers() & QtCore.Qt.AltModifier:

            # Smart backspace: remove four characters in one backspace if:
            # 1) everything left of the cursor is whitespace
            # 2) the four characters immediately left of the cursor are spaces
            if key == QtCore.Qt.Key_Backspace:
                col = self._get_input_buffer_cursor_column()
                cursor = self._control.textCursor()
                if col > 3 and not cursor.hasSelection():
                    text = self._get_input_buffer_cursor_line()[:col]
                    if text.endswith('    ') and not text.strip():
                        cursor.movePosition(QtGui.QTextCursor.Left,
                                            QtGui.QTextCursor.KeepAnchor, 4)
                        cursor.removeSelectedText()
                        return True

        return super(FrontendWidget,
                     self)._event_filter_console_keypress(event)

    def _insert_continuation_prompt(self, cursor):
        """ Reimplemented for auto-indentation.
        """
        super(FrontendWidget, self)._insert_continuation_prompt(cursor)
        cursor.insertText(' ' * self._input_splitter.indent_spaces)

    #---------------------------------------------------------------------------
    # 'BaseFrontendMixin' abstract interface
    #---------------------------------------------------------------------------

    def _handle_complete_reply(self, rep):
        """ Handle replies for tab completion.
        """
        self.log.debug("complete: %s", rep.get('content', ''))
        cursor = self._get_cursor()
        info = self._request_info.get('complete')
        if info and info.id == rep['parent_header']['msg_id'] and \
                info.pos == cursor.position():
            text = '.'.join(self._get_context())
            cursor.movePosition(QtGui.QTextCursor.Left, n=len(text))
            self._complete_with_items(cursor, rep['content']['matches'])

    def _silent_exec_callback(self, expr, callback):
        """Silently execute `expr` in the kernel and call `callback` with reply

        The `expr` is evaluated silently in the kernel (without output in
        the frontend). `callback` is then called with the
        `repr <http://docs.python.org/library/functions.html#repr>`_ of the result as its first argument.

        Parameters
        ----------
        expr : string
            valid string to be executed by the kernel.
        callback : function
            function accepting one argument, as a string. The string will be
            the `repr` of the result of evaluating `expr`

        The `callback` is called with the `repr()` of the result of `expr` as
        first argument. To get the object, do `eval()` on the passed value.

        See Also
        --------
        _handle_exec_callback : private method, deal with calling callback with reply

        """

        # Generate a uuid used to recognize replies to this particular
        # request (the message id could perhaps serve the same purpose).
        local_uuid = str(uuid.uuid1())
        msg_id = self.kernel_client.execute(
            '', silent=True, user_expressions={local_uuid: expr})
        self._callback_dict[local_uuid] = callback
        self._request_info['execute'][msg_id] = self._ExecutionRequest(
            msg_id, 'silent_exec_callback')

    def _handle_exec_callback(self, msg):
        """Execute `callback` corresponding to `msg` reply, after ``_silent_exec_callback``

        Parameters
        ----------
        msg : raw message sent by the kernel, containing a `user_expressions`
                dict and having a 'silent_exec_callback' kind.

        Notes
        -----
        This function will look for a `callback` associated with the
        corresponding message id. Association has been made by
        `_silent_exec_callback`. `callback` is then called with the `repr()`
        of the value of corresponding `user_expressions` as argument.
        `callback` is then removed from the known list so that any message
        coming again with the same id won't trigger it.

        """

        user_exp = msg['content'].get('user_expressions')
        if not user_exp:
            return
        for expression in user_exp:
            if expression in self._callback_dict:
                self._callback_dict.pop(expression)(user_exp[expression])

    def _handle_execute_reply(self, msg):
        """ Handles replies for code execution.
        """
        self.log.debug("execute: %s", msg.get('content', ''))
        msg_id = msg['parent_header']['msg_id']
        info = self._request_info['execute'].get(msg_id)
        # unset reading flag, because if execute finished, raw_input can't
        # still be pending.
        self._reading = False
        if info and info.kind == 'user' and not self._hidden:
            # Make sure that all output from the SUB channel has been processed
            # before writing a new prompt.
            self.kernel_client.iopub_channel.flush()

            # Reset the ANSI style information to prevent bad text in stdout
            # from messing up our colors. We're not a true terminal so we're
            # allowed to do this.
            if self.ansi_codes:
                self._ansi_processor.reset_sgr()

            content = msg['content']
            status = content['status']
            if status == 'ok':
                self._process_execute_ok(msg)
            elif status == 'error':
                self._process_execute_error(msg)
            elif status == 'aborted':
                self._process_execute_abort(msg)

            self._show_interpreter_prompt_for_reply(msg)
            self.executed.emit(msg)
            self._request_info['execute'].pop(msg_id)
        elif info and info.kind == 'silent_exec_callback' and not self._hidden:
            self._handle_exec_callback(msg)
            self._request_info['execute'].pop(msg_id)
        else:
            super(FrontendWidget, self)._handle_execute_reply(msg)

    def _handle_input_request(self, msg):
        """ Handle requests for raw_input.
        """
        self.log.debug("input: %s", msg.get('content', ''))
        if self._hidden:
            raise RuntimeError(
                'Request for raw input during hidden execution.')

        # Make sure that all output from the SUB channel has been processed
        # before entering readline mode.
        self.kernel_client.iopub_channel.flush()

        def callback(line):
            self.kernel_client.stdin_channel.input(line)

        if self._reading:
            self.log.debug(
                "Got second input request, assuming first was interrupted.")
            self._reading = False
        self._readline(msg['content']['prompt'], callback=callback)

    def _kernel_restarted_message(self, died=True):
        msg = "Kernel died, restarting" if died else "Kernel restarting"
        self._append_html("<br>%s<hr><br>" % msg, before_prompt=False)

    def _handle_kernel_died(self, since_last_heartbeat):
        """Handle the kernel's death (if we do not own the kernel).
        """
        self.log.warn("kernel died: %s", since_last_heartbeat)
        if self.custom_restart:
            self.custom_restart_kernel_died.emit(since_last_heartbeat)
        else:
            self._kernel_restarted_message(died=True)
            self.reset()

    def _handle_kernel_restarted(self, died=True):
        """Notice that the autorestarter restarted the kernel.

        There's nothing to do but show a message.
        """
        self.log.warn("kernel restarted")
        self._kernel_restarted_message(died=died)
        self.reset()

    def _handle_object_info_reply(self, rep):
        """ Handle replies for call tips.
        """
        self.log.debug("oinfo: %s", rep.get('content', ''))
        cursor = self._get_cursor()
        info = self._request_info.get('call_tip')
        if info and info.id == rep['parent_header']['msg_id'] and \
                info.pos == cursor.position():
            # Get the information for a call tip.  For now we format the call
            # line as string, later we can pass False to format_call and
            # syntax-highlight it ourselves for nicer formatting in the
            # calltip.
            content = rep['content']
            # if this is from pykernel, 'docstring' will be the only key
            if content.get('ismagic', False):
                # Don't generate a call-tip for magics. Ideally, we should
                # generate a tooltip, but not on ( like we do for actual
                # callables.
                call_info, doc = None, None
            else:
                call_info, doc = call_tip(content, format_call=True)
            if call_info or doc:
                self._call_tip_widget.show_call_info(call_info, doc)

    def _handle_pyout(self, msg):
        """ Handle display hook output.
        """
        self.log.debug("pyout: %s", msg.get('content', ''))
        if not self._hidden and self._is_from_this_session(msg):
            text = msg['content']['data']
            self._append_plain_text(text + '\n', before_prompt=True)

    def _handle_stream(self, msg):
        """ Handle stdout, stderr, and stdin.
        """
        self.log.debug("stream: %s", msg.get('content', ''))
        if not self._hidden and self._is_from_this_session(msg):
            # Most consoles treat tabs as being 8 space characters. Convert tabs
            # to spaces so that output looks as expected regardless of this
            # widget's tab width.
            text = msg['content']['data'].expandtabs(8)

            self._append_plain_text(text, before_prompt=True)
            self._control.moveCursor(QtGui.QTextCursor.End)

    def _handle_shutdown_reply(self, msg):
        """ Handle shutdown signal, only if from other console.
        """
        self.log.warn("shutdown: %s", msg.get('content', ''))
        restart = msg.get('content', {}).get('restart', False)
        if not self._hidden and not self._is_from_this_session(msg):
            # got shutdown reply, request came from session other than ours
            if restart:
                # someone restarted the kernel, handle it
                self._handle_kernel_restarted(died=False)
            else:
                # kernel was shutdown permanently
                # this triggers exit_requested if the kernel was local,
                # and a dialog if the kernel was remote,
                # so we don't suddenly clear the qtconsole without asking.
                if self._local_kernel:
                    self.exit_requested.emit(self)
                else:
                    title = self.window().windowTitle()
                    reply = QtGui.QMessageBox.question(
                        self, title, "Kernel has been shutdown permanently. "
                        "Close the Console?", QtGui.QMessageBox.Yes,
                        QtGui.QMessageBox.No)
                    if reply == QtGui.QMessageBox.Yes:
                        self.exit_requested.emit(self)

    def _handle_status(self, msg):
        """Handle status message"""
        # This is where a busy/idle indicator would be triggered,
        # when we make one.
        state = msg['content'].get('execution_state', '')
        if state == 'starting':
            # kernel started while we were running
            if self._executing:
                self._handle_kernel_restarted(died=True)
        elif state == 'idle':
            pass
        elif state == 'busy':
            pass

    def _started_channels(self):
        """ Called when the KernelManager channels have started listening or
            when the frontend is assigned an already listening KernelManager.
        """
        self.reset(clear=True)

    #---------------------------------------------------------------------------
    # 'FrontendWidget' public interface
    #---------------------------------------------------------------------------

    def copy_raw(self):
        """ Copy the currently selected text to the clipboard without attempting
            to remove prompts or otherwise alter the text.
        """
        self._control.copy()

    def execute_file(self, path, hidden=False):
        """ Attempts to execute file with 'path'. If 'hidden', no output is
            shown.
        """
        self.execute('execfile(%r)' % path, hidden=hidden)

    def interrupt_kernel(self):
        """ Attempts to interrupt the running kernel.
        
        Also unsets _reading flag, to avoid runtime errors
        if raw_input is called again.
        """
        if self.custom_interrupt:
            self._reading = False
            self.custom_interrupt_requested.emit()
        elif self.kernel_manager:
            self._reading = False
            self.kernel_manager.interrupt_kernel()
        else:
            self._append_plain_text(
                'Cannot interrupt a kernel I did not start.\n')

    def reset(self, clear=False):
        """ Resets the widget to its initial state if ``clear`` parameter
        is True, otherwise
        prints a visual indication of the fact that the kernel restarted, but
        does not clear the traces from previous usage of the kernel before it
        was restarted.  With ``clear=True``, it is similar to ``%clear``, but
        also re-writes the banner and aborts execution if necessary.
        """
        if self._executing:
            self._executing = False
            self._request_info['execute'] = {}
        self._reading = False
        self._highlighter.highlighting_on = False

        if clear:
            self._control.clear()
            self._append_plain_text(self.banner)
        # update output marker for stdout/stderr, so that startup
        # messages appear after banner:
        self._append_before_prompt_pos = self._get_cursor().position()
        self._show_interpreter_prompt()

    def restart_kernel(self, message, now=False):
        """ Attempts to restart the running kernel.
        """
        # FIXME: now should be configurable via a checkbox in the dialog.  Right
        # now at least the heartbeat path sets it to True and the manual restart
        # to False.  But those should just be the pre-selected states of a
        # checkbox that the user could override if so desired.  But I don't know
        # enough Qt to go implementing the checkbox now.

        if self.custom_restart:
            self.custom_restart_requested.emit()
            return

        if self.kernel_manager:
            # Pause the heart beat channel to prevent further warnings.
            self.kernel_client.hb_channel.pause()

            # Prompt the user to restart the kernel. Un-pause the heartbeat if
            # they decline. (If they accept, the heartbeat will be un-paused
            # automatically when the kernel is restarted.)
            if self.confirm_restart:
                buttons = QtGui.QMessageBox.Yes | QtGui.QMessageBox.No
                result = QtGui.QMessageBox.question(self, 'Restart kernel?',
                                                    message, buttons)
                do_restart = result == QtGui.QMessageBox.Yes
            else:
                # confirm_restart is False, so we don't need to ask user
                # anything, just do the restart
                do_restart = True
            if do_restart:
                try:
                    self.kernel_manager.restart_kernel(now=now)
                except RuntimeError as e:
                    self._append_plain_text('Error restarting kernel: %s\n' %
                                            e,
                                            before_prompt=True)
                else:
                    self._append_html(
                        "<br>Restarting kernel...\n<hr><br>",
                        before_prompt=True,
                    )
            else:
                self.kernel_client.hb_channel.unpause()

        else:
            self._append_plain_text(
                'Cannot restart a Kernel I did not start\n',
                before_prompt=True)

    #---------------------------------------------------------------------------
    # 'FrontendWidget' protected interface
    #---------------------------------------------------------------------------

    def _call_tip(self):
        """ Shows a call tip, if appropriate, at the current cursor location.
        """
        # Decide if it makes sense to show a call tip
        if not self.enable_calltips:
            return False
        cursor = self._get_cursor()
        cursor.movePosition(QtGui.QTextCursor.Left)
        if cursor.document().characterAt(cursor.position()) != '(':
            return False
        context = self._get_context(cursor)
        if not context:
            return False

        # Send the metadata request to the kernel
        name = '.'.join(context)
        msg_id = self.kernel_client.object_info(name)
        pos = self._get_cursor().position()
        self._request_info['call_tip'] = self._CallTipRequest(msg_id, pos)
        return True

    def _complete(self):
        """ Performs completion at the current cursor location.
        """
        context = self._get_context()
        if context:
            # Send the completion request to the kernel
            msg_id = self.kernel_client.complete(
                '.'.join(context),  # text
                self._get_input_buffer_cursor_line(),  # line
                self._get_input_buffer_cursor_column(),  # cursor_pos
                self.input_buffer)  # block
            pos = self._get_cursor().position()
            info = self._CompletionRequest(msg_id, pos)
            self._request_info['complete'] = info

    def _get_context(self, cursor=None):
        """ Gets the context for the specified cursor (or the current cursor
            if none is specified).
        """
        if cursor is None:
            cursor = self._get_cursor()
        cursor.movePosition(QtGui.QTextCursor.StartOfBlock,
                            QtGui.QTextCursor.KeepAnchor)
        text = cursor.selection().toPlainText()
        return self._completion_lexer.get_context(text)

    def _process_execute_abort(self, msg):
        """ Process a reply for an aborted execution request.
        """
        self._append_plain_text("ERROR: execution aborted\n")

    def _process_execute_error(self, msg):
        """ Process a reply for an execution request that resulted in an error.
        """
        content = msg['content']
        # If a SystemExit is passed along, exit() was called.  The IPython
        # %exit magic can also pass '-k' as the exit value to request that
        # the kernel be kept running.
        if content['ename'] == 'SystemExit':
            keepkernel = content['evalue'] == '-k' or content[
                'evalue'] == 'True'
            self._keep_kernel_on_exit = keepkernel
            self.exit_requested.emit(self)
        else:
            traceback = ''.join(content['traceback'])
            self._append_plain_text(traceback)

    def _process_execute_ok(self, msg):
        """ Process a reply for a successful execution request.
        """
        payload = msg['content']['payload']
        for item in payload:
            if not self._process_execute_payload(item):
                warning = 'Warning: received unknown payload of type %s'
                print(warning % repr(item['source']))

    def _process_execute_payload(self, item):
        """ Process a single payload item from the list of payload items in an
            execution reply. Returns whether the payload was handled.
        """
        # The basic FrontendWidget doesn't handle payloads, as they are a
        # mechanism for going beyond the standard Python interpreter model.
        return False

    def _show_interpreter_prompt(self):
        """ Shows a prompt for the interpreter.
        """
        self._show_prompt('>>> ')

    def _show_interpreter_prompt_for_reply(self, msg):
        """ Shows a prompt for the interpreter given an 'execute_reply' message.
        """
        self._show_interpreter_prompt()

    #------ Signal handlers ----------------------------------------------------

    def _document_contents_change(self, position, removed, added):
        """ Called whenever the document's content changes. Display a call tip
            if appropriate.
        """
        # Calculate where the cursor should be *after* the change:
        position += added

        document = self._control.document()
        if position == self._get_cursor().position():
            self._call_tip()

    #------ Trait default initializers -----------------------------------------

    def _banner_default(self):
        """ Returns the standard Python banner.
        """
        banner = 'Python %s on %s\nType "help", "copyright", "credits" or ' \
            '"license" for more information.'
        return banner % (sys.version, sys.platform)
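
The Enter-key handling above hinges on `_is_complete`, which defers to an InputSplitter. A minimal sketch of that check in isolation, assuming `IPython.core.inputsplitter` provides the `InputSplitter` used as `_input_splitter_class` (the import is not shown in this snippet):

from IPython.core.inputsplitter import InputSplitter

isp = InputSplitter()
isp.reset()
print(isp.push('a = 1'))                # True: a complete simple statement
isp.reset()
print(isp.push('for i in range(3):'))   # False: the block is still open
print(isp.push_accepts_more())          # True: the frontend should keep reading input
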
Example no. 28
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp):
    name = 'ipkernel'
    aliases = Dict(kernel_aliases)
    flags = Dict(kernel_flags)
    classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
    # the kernel class, as an importstring
    kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel',
                                    config=True,
                                    help="""The Kernel subclass to be used.
    
    This should allow easy re-use of the IPKernelApp entry point
    to configure and launch kernels other than IPython's own.
    """)
    kernel = Any()
    poller = Any(
    )  # don't restrict this even though current pollers are all Threads
    heartbeat = Instance(Heartbeat)
    session = Instance('IPython.kernel.zmq.session.Session')
    ports = Dict()

    # ipkernel doesn't get its own config file
    def _config_file_name_default(self):
        return 'ipython_config.py'

    # inherit config file name from parent:
    parent_appname = Unicode(config=True)

    def _parent_appname_changed(self, name, old, new):
        if self.config_file_specified:
            # it was manually specified, ignore
            return
        self.config_file_name = new.replace('-', '_') + u'_config.py'
        # don't let this count as specifying the config file
        self.config_file_specified.remove(self.config_file_name)

    # connection info:
    transport = CaselessStrEnum(['tcp', 'ipc'],
                                default_value='tcp',
                                config=True)
    ip = Unicode(
        config=True,
        help="Set the IP or interface on which the kernel will listen.")

    def _ip_default(self):
        if self.transport == 'ipc':
            if self.connection_file:
                return os.path.splitext(self.abs_connection_file)[0] + '-ipc'
            else:
                return 'kernel-ipc'
        else:
            return localhost()

    hb_port = Integer(0,
                      config=True,
                      help="set the heartbeat port [default: random]")
    shell_port = Integer(0,
                         config=True,
                         help="set the shell (ROUTER) port [default: random]")
    iopub_port = Integer(0,
                         config=True,
                         help="set the iopub (PUB) port [default: random]")
    stdin_port = Integer(0,
                         config=True,
                         help="set the stdin (ROUTER) port [default: random]")
    control_port = Integer(
        0, config=True, help="set the control (ROUTER) port [default: random]")
    connection_file = Unicode(
        '',
        config=True,
        help=
        """JSON file in which to store connection info [default: kernel-<pid>.json]
    
    This file will contain the IP, ports, and authentication key needed to connect
    clients to this kernel. By default, this file will be created in the security dir
    of the current profile, but can be specified by absolute path.
    """)

    @property
    def abs_connection_file(self):
        if os.path.basename(self.connection_file) == self.connection_file:
            return os.path.join(self.profile_dir.security_dir,
                                self.connection_file)
        else:
            return self.connection_file

    # streams, etc.
    no_stdout = Bool(False,
                     config=True,
                     help="redirect stdout to the null device")
    no_stderr = Bool(False,
                     config=True,
                     help="redirect stderr to the null device")
    outstream_class = DottedObjectName(
        'IPython.kernel.zmq.iostream.OutStream',
        config=True,
        help="The importstring for the OutStream factory")
    displayhook_class = DottedObjectName(
        'IPython.kernel.zmq.displayhook.ZMQDisplayHook',
        config=True,
        help="The importstring for the DisplayHook factory")

    # polling
    parent_handle = Integer(
        0,
        config=True,
        help="""kill this process if its parent dies.  On Windows, the argument
        specifies the HANDLE of the parent process, otherwise it is simply boolean.
        """)
    interrupt = Integer(0,
                        config=True,
                        help="""ONLY USED ON WINDOWS
        Interrupt this process when the parent is signaled.
        """)

    def init_crash_handler(self):
        # Install minimal exception handling
        sys.excepthook = FormattedTB(mode='Verbose',
                                     color_scheme='NoColor',
                                     ostream=sys.__stdout__)

    def init_poller(self):
        if sys.platform == 'win32':
            if self.interrupt or self.parent_handle:
                self.poller = ParentPollerWindows(self.interrupt,
                                                  self.parent_handle)
        elif self.parent_handle:
            self.poller = ParentPollerUnix()

    def _bind_socket(self, s, port):
        iface = '%s://%s' % (self.transport, self.ip)
        if self.transport == 'tcp':
            if port <= 0:
                port = s.bind_to_random_port(iface)
            else:
                s.bind("tcp://%s:%i" % (self.ip, port))
        elif self.transport == 'ipc':
            if port <= 0:
                port = 1
                path = "%s-%i" % (self.ip, port)
                while os.path.exists(path):
                    port = port + 1
                    path = "%s-%i" % (self.ip, port)
            else:
                path = "%s-%i" % (self.ip, port)
            s.bind("ipc://%s" % path)
        return port

    def load_connection_file(self):
        """load ip/port/hmac config from JSON connection file"""
        try:
            fname = filefind(self.connection_file,
                             ['.', self.profile_dir.security_dir])
        except IOError:
            self.log.debug("Connection file not found: %s",
                           self.connection_file)
            # This means I own it, so I will clean it up:
            atexit.register(self.cleanup_connection_file)
            return
        self.log.debug(u"Loading connection file %s", fname)
        with open(fname) as f:
            s = f.read()
        cfg = json.loads(s)
        self.transport = cfg.get('transport', self.transport)
        if self.ip == self._ip_default() and 'ip' in cfg:
            # not overridden by config or cl_args
            self.ip = cfg['ip']
        for channel in ('hb', 'shell', 'iopub', 'stdin', 'control'):
            name = channel + '_port'
            if getattr(self, name) == 0 and name in cfg:
                # not overridden by config or cl_args
                setattr(self, name, cfg[name])
        if 'key' in cfg:
            self.config.Session.key = str_to_bytes(cfg['key'])

    def write_connection_file(self):
        """write connection info to JSON file"""
        cf = self.abs_connection_file
        self.log.debug("Writing connection file: %s", cf)
        write_connection_file(cf,
                              ip=self.ip,
                              key=self.session.key,
                              transport=self.transport,
                              shell_port=self.shell_port,
                              stdin_port=self.stdin_port,
                              hb_port=self.hb_port,
                              iopub_port=self.iopub_port,
                              control_port=self.control_port)

    def cleanup_connection_file(self):
        cf = self.abs_connection_file
        self.log.debug("Cleaning up connection file: %s", cf)
        try:
            os.remove(cf)
        except (IOError, OSError):
            pass

        self.cleanup_ipc_files()

    def cleanup_ipc_files(self):
        """cleanup ipc files if we wrote them"""
        if self.transport != 'ipc':
            return
        for port in (self.shell_port, self.iopub_port, self.stdin_port,
                     self.hb_port, self.control_port):
            ipcfile = "%s-%i" % (self.ip, port)
            try:
                os.remove(ipcfile)
            except (IOError, OSError):
                pass

    def init_connection_file(self):
        if not self.connection_file:
            self.connection_file = "kernel-%s.json" % os.getpid()
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r",
                           self.connection_file,
                           exc_info=True)
            self.exit(1)

    def init_sockets(self):
        # Create a context, a session, and the kernel sockets.
        self.log.info("Starting the kernel at pid: %i", os.getpid())
        context = zmq.Context.instance()
        # Uncomment this to try closing the context.
        # atexit.register(context.term)

        self.shell_socket = context.socket(zmq.ROUTER)
        self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
        self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)

        self.iopub_socket = context.socket(zmq.PUB)
        self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
        self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)

        self.stdin_socket = context.socket(zmq.ROUTER)
        self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
        self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)

        self.control_socket = context.socket(zmq.ROUTER)
        self.control_port = self._bind_socket(self.control_socket,
                                              self.control_port)
        self.log.debug("control ROUTER Channel on port: %i" %
                       self.control_port)

    def init_heartbeat(self):
        """start the heart beating"""
        # heartbeat doesn't share context, because it mustn't be blocked
        # by the GIL, which is accessed by libzmq when freeing zero-copy messages
        hb_ctx = zmq.Context()
        self.heartbeat = Heartbeat(hb_ctx,
                                   (self.transport, self.ip, self.hb_port))
        self.hb_port = self.heartbeat.port
        self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
        self.heartbeat.start()

    def log_connection_info(self):
        """display connection info, and store ports"""
        basename = os.path.basename(self.connection_file)
        if basename == self.connection_file or \
            os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
            # use shortname
            tail = basename
            if self.profile != 'default':
                tail += " --profile %s" % self.profile
        else:
            tail = self.connection_file
        lines = [
            "To connect another client to this kernel, use:",
            "    --existing %s" % tail,
        ]
        # log connection info
        # info-level, so often not shown.
        # frontends should use the %connect_info magic
        # to see the connection info
        for line in lines:
            self.log.info(line)
        # also raw print to the terminal if no parent_handle (`ipython kernel`)
        if not self.parent_handle:
            io.rprint(_ctrl_c_message)
            for line in lines:
                io.rprint(line)

        self.ports = dict(shell=self.shell_port,
                          iopub=self.iopub_port,
                          stdin=self.stdin_port,
                          hb=self.hb_port,
                          control=self.control_port)

    def init_session(self):
        """create our session object"""
        default_secure(self.config)
        self.session = Session(parent=self, username=u'kernel')

    def init_blackhole(self):
        """redirects stdout/stderr to devnull if necessary"""
        if self.no_stdout or self.no_stderr:
            blackhole = open(os.devnull, 'w')
            if self.no_stdout:
                sys.stdout = sys.__stdout__ = blackhole
            if self.no_stderr:
                sys.stderr = sys.__stderr__ = blackhole

    def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            outstream_factory = import_item(str(self.outstream_class))
            sys.stdout = outstream_factory(self.session, self.iopub_socket,
                                           u'stdout')
            sys.stderr = outstream_factory(self.session, self.iopub_socket,
                                           u'stderr')
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            sys.displayhook = displayhook_factory(self.session,
                                                  self.iopub_socket)

    def init_signal(self):
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def init_kernel(self):
        """Create the Kernel object itself"""
        shell_stream = ZMQStream(self.shell_socket)
        control_stream = ZMQStream(self.control_socket)

        kernel_factory = import_item(str(self.kernel_class))

        kernel = kernel_factory(
            parent=self,
            session=self.session,
            shell_streams=[shell_stream, control_stream],
            iopub_socket=self.iopub_socket,
            stdin_socket=self.stdin_socket,
            log=self.log,
            profile_dir=self.profile_dir,
            user_ns=self.user_ns,
        )
        kernel.record_ports(self.ports)
        self.kernel = kernel

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""

        # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
        # to ensure that any exception is printed straight to stderr.
        # Normally _showtraceback associates the reply with an execution,
        # which means frontends will never draw it, as this exception
        # is not associated with any execute request.

        shell = self.shell
        _showtraceback = shell._showtraceback
        try:
            # replace pyerr-sending traceback with stderr
            def print_tb(etype, evalue, stb):
                print("GUI event loop or pylab initialization failed",
                      file=io.stderr)
                print(shell.InteractiveTB.stb2text(stb), file=io.stderr)

            shell._showtraceback = print_tb
            InteractiveShellApp.init_gui_pylab(self)
        finally:
            shell._showtraceback = _showtraceback

    def init_shell(self):
        self.shell = self.kernel.shell
        self.shell.configurables.append(self)

    @catch_config_error
    def initialize(self, argv=None):
        super(IPKernelApp, self).initialize(argv)
        self.init_blackhole()
        self.init_connection_file()
        self.init_session()
        self.init_poller()
        self.init_sockets()
        self.init_heartbeat()
        # writing/displaying connection info must be *after* init_sockets/heartbeat
        self.log_connection_info()
        self.write_connection_file()
        self.init_io()
        self.init_signal()
        self.init_kernel()
        # shell init steps
        self.init_path()
        self.init_shell()
        self.init_gui_pylab()
        self.init_extensions()
        self.init_code()
        # flush stdout/stderr, so that anything written to these streams during
        # initialization does not get associated with the first execution request
        sys.stdout.flush()
        sys.stderr.flush()

    def start(self):
        if self.poller is not None:
            self.poller.start()
        self.kernel.start()
        try:
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            pass
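
For reference, `write_connection_file` above records the transport, IP, per-channel ports and the session's HMAC key. A minimal sketch of the resulting JSON structure, with invented values (the exact set of fields on disk is whatever write_connection_file emits):

import json

# Invented values mirroring the keyword arguments passed to write_connection_file().
cfg = {
    "transport": "tcp",
    "ip": "127.0.0.1",
    "shell_port": 53794,
    "iopub_port": 53795,
    "stdin_port": 53796,
    "hb_port": 53797,
    "control_port": 53798,
    "key": "a0436f6c-1916-498b-8eb9-e81ab9368e84",
}
print(json.dumps(cfg, indent=2))
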
Example no. 29
class NotebookApp(BaseIPythonApplication):

    name = 'ipython-notebook'

    description = """
        The IPython HTML Notebook.
        
        This launches a Tornado based HTML Notebook Server that serves up an
        HTML5/Javascript Notebook client.
    """
    examples = _examples
    aliases = aliases
    flags = flags

    classes = [
        KernelManager,
        ProfileDir,
        Session,
        MappingKernelManager,
        ContentsManager,
        FileContentsManager,
        NotebookNotary,
        KernelSpecManager,
    ]
    flags = Dict(flags)
    aliases = Dict(aliases)

    subcommands = dict(list=(NbserverListApp,
                             NbserverListApp.description.splitlines()[0]), )

    _log_formatter_cls = LogFormatter

    def _log_level_default(self):
        return logging.INFO

    def _log_datefmt_default(self):
        """Exclude date from default date format"""
        return "%H:%M:%S"

    def _log_format_default(self):
        """override default log format to include time"""
        return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s"

    # create requested profiles by default, if they don't exist:
    auto_create = Bool(True)

    # file to be opened in the notebook server
    file_to_run = Unicode('', config=True)

    # Network related information

    allow_origin = Unicode('',
                           config=True,
                           help="""Set the Access-Control-Allow-Origin header
        
        Use '*' to allow any origin to access your server.
        
        Takes precedence over allow_origin_pat.
        """)

    allow_origin_pat = Unicode(
        '',
        config=True,
        help=
        """Use a regular expression for the Access-Control-Allow-Origin header
        
        Requests from an origin matching the expression will get replies with:
        
            Access-Control-Allow-Origin: origin
        
        where `origin` is the origin of the request.
        
        Ignored if allow_origin is set.
        """)

    allow_credentials = Bool(
        False,
        config=True,
        help="Set the Access-Control-Allow-Credentials: true header")

    default_url = Unicode('/tree',
                          config=True,
                          help="The default URL to redirect to from `/`")

    ip = Unicode('localhost',
                 config=True,
                 help="The IP address the notebook server will listen on.")

    def _ip_default(self):
        """Return localhost if available, 127.0.0.1 otherwise.
        
        On some (horribly broken) systems, localhost cannot be bound.
        """
        s = socket.socket()
        try:
            s.bind(('localhost', 0))
        except socket.error as e:
            self.log.warn(
                "Cannot bind to localhost, using 127.0.0.1 as default ip\n%s",
                e)
            return '127.0.0.1'
        else:
            s.close()
            return 'localhost'

    def _ip_changed(self, name, old, new):
        if new == u'*': self.ip = u''

    port = Integer(8888,
                   config=True,
                   help="The port the notebook server will listen on.")
    port_retries = Integer(
        50,
        config=True,
        help=
        "The number of additional ports to try if the specified port is not available."
    )

    certfile = Unicode(
        u'',
        config=True,
        help="""The full path to an SSL/TLS certificate file.""")

    keyfile = Unicode(
        u'',
        config=True,
        help="""The full path to a private key file for usage with SSL/TLS.""")

    cookie_secret_file = Unicode(
        config=True, help="""The file where the cookie secret is stored.""")

    def _cookie_secret_file_default(self):
        if self.profile_dir is None:
            return ''
        return os.path.join(self.profile_dir.security_dir,
                            'notebook_cookie_secret')

    cookie_secret = Bytes(b'',
                          config=True,
                          help="""The random bytes used to secure cookies.
        By default this is a new random number every time you start the Notebook.
        Set it to a value in a config file to enable logins to persist across server sessions.
        
        Note: Cookie secrets should be kept private, do not share config files with
        cookie_secret stored in plaintext (you can read the value from a file).
        """)

    def _cookie_secret_default(self):
        if os.path.exists(self.cookie_secret_file):
            with io.open(self.cookie_secret_file, 'rb') as f:
                return f.read()
        else:
            secret = base64.encodestring(os.urandom(1024))
            self._write_cookie_secret_file(secret)
            return secret

    def _write_cookie_secret_file(self, secret):
        """write my secret to my secret_file"""
        self.log.info("Writing notebook server cookie secret to %s",
                      self.cookie_secret_file)
        with io.open(self.cookie_secret_file, 'wb') as f:
            f.write(secret)
        try:
            os.chmod(self.cookie_secret_file, 0o600)
        except OSError:
            self.log.warn("Could not set permissions on %s",
                          self.cookie_secret_file)

    password = Unicode(u'',
                       config=True,
                       help="""Hashed password to use for web authentication.

                      To generate, type in a python/IPython shell:

                        from IPython.lib import passwd; passwd()

                      The string should be of the form type:salt:hashed-password.
                      """)

    open_browser = Bool(True,
                        config=True,
                        help="""Whether to open in a browser after starting.
                        The specific browser used is platform dependent and
                        determined by the python standard library `webbrowser`
                        module, unless it is overridden using the --browser
                        (NotebookApp.browser) configuration option.
                        """)

    browser = Unicode(u'',
                      config=True,
                      help="""Specify what command to use to invoke a web
                      browser when opening the notebook. If not specified, the
                      default browser will be determined by the `webbrowser`
                      standard library module, which allows setting of the
                      BROWSER environment variable to override it.
                      """)

    webapp_settings = Dict(config=True,
                           help="DEPRECATED, use tornado_settings")

    def _webapp_settings_changed(self, name, old, new):
        self.log.warn(
            "\n    webapp_settings is deprecated, use tornado_settings.\n")
        self.tornado_settings = new

    tornado_settings = Dict(
        config=True,
        help="Supply overrides for the tornado.web.Application that the "
        "IPython notebook uses.")

    ssl_options = Dict(config=True,
                       help="""Supply SSL options for the tornado HTTPServer.
            See the tornado docs for details.""")
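    # Illustrative usage (path is hypothetical): the dict is passed through to
    # tornado's HTTPServer, e.g. to verify clients against a CA bundle:
    #   c.NotebookApp.ssl_options = {'ca_certs': '/path/to/ca.pem'}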

    jinja_environment_options = Dict(
        config=True,
        help="Supply extra arguments that will be passed to Jinja environment."
    )

    jinja_template_vars = Dict(
        config=True,
        help="Extra variables to supply to jinja templates when rendering.",
    )

    enable_mathjax = Bool(
        True,
        config=True,
        help="""Whether to enable MathJax for typesetting math/TeX

        MathJax is the javascript library IPython uses to render math/LaTeX. It is
        very large, so you may want to disable it if you have a slow internet
        connection, or for offline use of the notebook.

        When disabled, equations etc. will appear as their untransformed TeX source.
        """)

    def _enable_mathjax_changed(self, name, old, new):
        """set mathjax url to empty if mathjax is disabled"""
        if not new:
            self.mathjax_url = u''

    base_url = Unicode('/',
                       config=True,
                       help='''The base URL for the notebook server.

                               Leading and trailing slashes can be omitted,
                               and will automatically be added.
                               ''')

    def _base_url_changed(self, name, old, new):
        if not new.startswith('/'):
            self.base_url = '/' + new
        elif not new.endswith('/'):
            self.base_url = new + '/'
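    # Note: the assignments above re-fire this handler, so a value such as 'foo'
    # is normalized in two steps ('foo' -> '/foo' -> '/foo/').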

    base_project_url = Unicode('/',
                               config=True,
                               help="""DEPRECATED use base_url""")

    def _base_project_url_changed(self, name, old, new):
        self.log.warn("base_project_url is deprecated, use base_url")
        self.base_url = new

    extra_static_paths = List(
        Unicode,
        config=True,
        help="""Extra paths to search for serving static files.
        
        This allows adding javascript/css files that are served from the notebook server machine,
        or overriding individual files in IPython's built-in static files.""")

    def _extra_static_paths_default(self):
        return [os.path.join(self.profile_dir.location, 'static')]

    @property
    def static_file_path(self):
        """return extra paths + the default location"""
        return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]

    extra_template_paths = List(
        Unicode,
        config=True,
        help="""Extra paths to search for serving jinja templates.

        Can be used to override templates from IPython.html.templates.""")

    def _extra_template_paths_default(self):
        return []

    @property
    def template_file_path(self):
        """return extra paths + the default locations"""
        return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST

    extra_nbextensions_path = List(
        Unicode,
        config=True,
        help="""extra paths to look for Javascript notebook extensions""")

    @property
    def nbextensions_path(self):
        """The path to look for Javascript notebook extensions"""
        return self.extra_nbextensions_path + [
            os.path.join(get_ipython_dir(), 'nbextensions')
        ] + SYSTEM_NBEXTENSIONS_DIRS

    websocket_url = Unicode("",
                            config=True,
                            help="""The base URL for websockets,
        if it differs from the HTTP server (hint: it almost certainly doesn't).
        
        Should be in the form of an HTTP origin: ws[s]://hostname[:port]
        """)
    mathjax_url = Unicode("", config=True, help="""The url for MathJax.js.""")

    def _mathjax_url_default(self):
        if not self.enable_mathjax:
            return u''
        static_url_prefix = self.tornado_settings.get(
            "static_url_prefix", url_path_join(self.base_url, "static"))

        # try local mathjax, either in nbextensions/mathjax or static/mathjax
        for (url_prefix, search_path) in [
            (url_path_join(self.base_url,
                           "nbextensions"), self.nbextensions_path),
            (static_url_prefix, self.static_file_path),
        ]:
            self.log.debug("searching for local mathjax in %s", search_path)
            try:
                mathjax = filefind(os.path.join('mathjax', 'MathJax.js'),
                                   search_path)
            except IOError:
                continue
            else:
                url = url_path_join(url_prefix, u"mathjax/MathJax.js")
                self.log.info("Serving local MathJax from %s at %s", mathjax,
                              url)
                return url

        # no local mathjax, serve from CDN
        url = u"https://cdn.mathjax.org/mathjax/latest/MathJax.js"
        self.log.info("Using MathJax from CDN: %s", url)
        return url

    def _mathjax_url_changed(self, name, old, new):
        if new and not self.enable_mathjax:
            # enable_mathjax=False overrides mathjax_url
            self.mathjax_url = u''
        else:
            self.log.info("Using MathJax: %s", new)

    contents_manager_class = Type(default_value=FileContentsManager,
                                  klass=ContentsManager,
                                  config=True,
                                  help='The notebook manager class to use.')
    kernel_manager_class = Type(default_value=MappingKernelManager,
                                config=True,
                                help='The kernel manager class to use.')
    session_manager_class = Type(default_value=SessionManager,
                                 config=True,
                                 help='The session manager class to use.')
    cluster_manager_class = Type(default_value=ClusterManager,
                                 config=True,
                                 help='The cluster manager class to use.')

    config_manager_class = Type(default_value=ConfigManager,
                                config=True,
                                help='The config manager class to use')

    kernel_spec_manager = Instance(KernelSpecManager, allow_none=True)

    kernel_spec_manager_class = Type(default_value=KernelSpecManager,
                                     config=True,
                                     help="""
        The kernel spec manager class to use. Should be a subclass
        of `IPython.kernel.kernelspec.KernelSpecManager`.

        The API of KernelSpecManager is provisional and might change
        without warning between this version of IPython and the next stable one.
        """)

    login_handler_class = Type(
        default_value=LoginHandler,
        klass=web.RequestHandler,
        config=True,
        help='The login handler class to use.',
    )

    logout_handler_class = Type(
        default_value=LogoutHandler,
        klass=web.RequestHandler,
        config=True,
        help='The logout handler class to use.',
    )

    trust_xheaders = Bool(
        False,
        config=True,
        help=("Whether or not to trust the X-Scheme/X-Forwarded-Proto and "
              "X-Real-Ip/X-Forwarded-For headers sent by the upstream reverse "
              "proxy. Necessary if the proxy handles SSL."))

    info_file = Unicode()

    def _info_file_default(self):
        info_file = "nbserver-%s.json" % os.getpid()
        return os.path.join(self.profile_dir.security_dir, info_file)

    pylab = Unicode('disabled',
                    config=True,
                    help="""
        DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
        """)

    def _pylab_changed(self, name, old, new):
        """when --pylab is specified, display a warning and exit"""
        if new != 'warn':
            backend = ' %s' % new
        else:
            backend = ''
        self.log.error(
            "Support for specifying --pylab on the command line has been removed."
        )
        self.log.error(
            "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself."
            .format(backend))
        self.exit(1)

    notebook_dir = Unicode(
        config=True, help="The directory to use for notebooks and kernels.")

    def _notebook_dir_default(self):
        if self.file_to_run:
            return os.path.dirname(os.path.abspath(self.file_to_run))
        else:
            return py3compat.getcwd()

    def _notebook_dir_changed(self, name, old, new):
        """Do a bit of validation of the notebook dir."""
        if not os.path.isabs(new):
            # If we receive a non-absolute path, make it absolute.
            self.notebook_dir = os.path.abspath(new)
            return
        if not os.path.isdir(new):
            raise TraitError("No such notebook dir: %r" % new)

        # setting App.notebook_dir implies setting notebook and kernel dirs as well
        self.config.FileContentsManager.root_dir = new
        self.config.MappingKernelManager.root_dir = new

    server_extensions = List(
        Unicode(),
        config=True,
        help=(
            "Python modules to load as notebook server extensions. "
            "This is an experimental API, and may change in future releases."))

    reraise_server_extension_failures = Bool(
        False,
        config=True,
        help="Reraise exceptions encountered loading server extensions?",
    )

    def parse_command_line(self, argv=None):
        super(NotebookApp, self).parse_command_line(argv)

        if self.extra_args:
            arg0 = self.extra_args[0]
            f = os.path.abspath(arg0)
            self.argv.remove(arg0)
            if not os.path.exists(f):
                self.log.critical("No such file or directory: %s", f)
                self.exit(1)

            # Use config here, to ensure that it takes higher priority than
            # anything that comes from the profile.
            c = Config()
            if os.path.isdir(f):
                c.NotebookApp.notebook_dir = f
            elif os.path.isfile(f):
                c.NotebookApp.file_to_run = f
            self.update_config(c)

    def init_configurables(self):
        self.kernel_spec_manager = self.kernel_spec_manager_class(
            parent=self,
            ipython_dir=self.ipython_dir,
        )
        self.kernel_manager = self.kernel_manager_class(
            parent=self,
            log=self.log,
            connection_dir=self.profile_dir.security_dir,
        )
        self.contents_manager = self.contents_manager_class(
            parent=self,
            log=self.log,
        )
        self.session_manager = self.session_manager_class(
            parent=self,
            log=self.log,
            kernel_manager=self.kernel_manager,
            contents_manager=self.contents_manager,
        )
        self.cluster_manager = self.cluster_manager_class(
            parent=self,
            log=self.log,
        )

        self.config_manager = self.config_manager_class(
            parent=self,
            log=self.log,
            profile_dir=self.profile_dir.location,
        )

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger that
        # self.log is a child of. The logging module dispatches log messages to a logger
        # and all of its ancestors until propagate is set to False.
        self.log.propagate = False

        for log in app_log, access_log, gen_log:
            # consistent log output name (NotebookApp instead of tornado.access, etc.)
            log.name = self.log.name
        # hook up tornado 3's loggers to our app handlers
        logger = logging.getLogger('tornado')
        logger.propagate = True
        logger.parent = self.log
        logger.setLevel(self.log.level)

    def init_webapp(self):
        """initialize tornado webapp and httpserver"""
        self.tornado_settings['allow_origin'] = self.allow_origin
        if self.allow_origin_pat:
            self.tornado_settings['allow_origin_pat'] = re.compile(
                self.allow_origin_pat)
        self.tornado_settings['allow_credentials'] = self.allow_credentials
        # ensure default_url starts with base_url
        if not self.default_url.startswith(self.base_url):
            self.default_url = url_path_join(self.base_url, self.default_url)

        self.web_app = NotebookWebApplication(
            self, self.kernel_manager, self.contents_manager,
            self.cluster_manager, self.session_manager,
            self.kernel_spec_manager, self.config_manager, self.log,
            self.base_url, self.default_url, self.tornado_settings,
            self.jinja_environment_options)
        ssl_options = self.ssl_options
        if self.certfile:
            ssl_options['certfile'] = self.certfile
        if self.keyfile:
            ssl_options['keyfile'] = self.keyfile
        if not ssl_options:
            # None indicates no SSL config
            ssl_options = None
        else:
            # Disable SSLv3, since its use is discouraged.
            ssl_options['ssl_version'] = ssl.PROTOCOL_TLSv1
        self.login_handler_class.validate_security(self,
                                                   ssl_options=ssl_options)
        self.http_server = httpserver.HTTPServer(self.web_app,
                                                 ssl_options=ssl_options,
                                                 xheaders=self.trust_xheaders)

        success = None
        for port in random_ports(self.port, self.port_retries + 1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                if e.errno == errno.EADDRINUSE:
                    self.log.info(
                        'The port %i is already in use, trying another random port.'
                        % port)
                    continue
                elif e.errno in (errno.EACCES,
                                 getattr(errno, 'WSAEACCES', errno.EACCES)):
                    self.log.warn("Permission to listen on port %i denied" %
                                  port)
                    continue
                else:
                    raise
            else:
                self.port = port
                success = True
                break
        if not success:
            self.log.critical(
                'ERROR: the notebook server could not be started because '
                'no available port could be found.')
            self.exit(1)

    @property
    def display_url(self):
        ip = self.ip if self.ip else '[all ip addresses on your system]'
        return self._url(ip)

    @property
    def connection_url(self):
        ip = self.ip if self.ip else 'localhost'
        return self._url(ip)

    def _url(self, ip):
        proto = 'https' if self.certfile else 'http'
        return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url)

    def init_terminals(self):
        try:
            from .terminal import initialize
            initialize(self.web_app)
            self.web_app.settings['terminals_available'] = True
        except ImportError as e:
            log = self.log.debug if sys.platform == 'win32' else self.log.warn
            log("Terminals not available (error was %s)", e)

    def init_signal(self):
        if not sys.platform.startswith('win'):
            signal.signal(signal.SIGINT, self._handle_sigint)
        signal.signal(signal.SIGTERM, self._signal_stop)
        if hasattr(signal, 'SIGUSR1'):
            # Windows doesn't support SIGUSR1
            signal.signal(signal.SIGUSR1, self._signal_info)
        if hasattr(signal, 'SIGINFO'):
            # only on BSD-based systems
            signal.signal(signal.SIGINFO, self._signal_info)

    def _handle_sigint(self, sig, frame):
        """SIGINT handler spawns confirmation dialog"""
        # register more forceful signal handler for ^C^C case
        signal.signal(signal.SIGINT, self._signal_stop)
        # request confirmation dialog in bg thread, to avoid
        # blocking the App
        thread = threading.Thread(target=self._confirm_exit)
        thread.daemon = True
        thread.start()

    def _restore_sigint_handler(self):
        """callback for restoring original SIGINT handler"""
        signal.signal(signal.SIGINT, self._handle_sigint)

    def _confirm_exit(self):
        """confirm shutdown on ^C
        
        A second ^C, or answering 'y' within 5s will cause shutdown,
        otherwise original SIGINT handler will be restored.
        
        This doesn't work on Windows.
        """
        info = self.log.info
        info('interrupted')
        print(self.notebook_info())
        sys.stdout.write("Shutdown this notebook server (y/[n])? ")
        sys.stdout.flush()
        r, w, x = select.select([sys.stdin], [], [], 5)
        if r:
            line = sys.stdin.readline()
            if line.lower().startswith('y') and 'n' not in line.lower():
                self.log.critical("Shutdown confirmed")
                ioloop.IOLoop.current().stop()
                return
        else:
            print("No answer for 5s:", end=' ')
        print("resuming operation...")
        # no answer, or answer is no:
        # set it back to original SIGINT handler
        # use IOLoop.add_callback because signal.signal must be called
        # from main thread
        ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)

    def _signal_stop(self, sig, frame):
        self.log.critical("received signal %s, stopping", sig)
        ioloop.IOLoop.current().stop()

    def _signal_info(self, sig, frame):
        print(self.notebook_info())

    def init_components(self):
        """Check the components submodule, and warn if it's unclean"""
        status = submodule.check_submodule_status()
        if status == 'missing':
            self.log.warn(
                "components submodule missing, running `git submodule update`")
            submodule.update_submodules(submodule.ipython_parent())
        elif status == 'unclean':
            self.log.warn(
                "components submodule unclean, you may see 404s on static/components"
            )
            self.log.warn(
                "run `setup.py submodule` or `git submodule update` to update")

    def init_server_extensions(self):
        """Load any extensions specified by config.

        Import the module, then call the load_jupyter_server_extension function,
        if one exists.
        
        The extension API is experimental, and may change in future releases.
        """
        for modulename in self.server_extensions:
            try:
                mod = importlib.import_module(modulename)
                func = getattr(mod, 'load_jupyter_server_extension', None)
                if func is not None:
                    func(self)
            except Exception:
                if self.reraise_server_extension_failures:
                    raise
                self.log.warn("Error loading server extension %s",
                              modulename,
                              exc_info=True)
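    # Minimal sketch of a server extension module (the module and names below are
    # hypothetical, shown only to illustrate the hook called above):
    #
    #   # mypackage/myextension.py
    #   def load_jupyter_server_extension(nbapp):
    #       # receives the NotebookApp instance at startup
    #       nbapp.log.info("myextension loaded")
    #
    # enabled via: c.NotebookApp.server_extensions = ['mypackage.myextension']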

    @catch_config_error
    def initialize(self, argv=None):
        super(NotebookApp, self).initialize(argv)
        self.init_logging()
        self.init_configurables()
        self.init_components()
        self.init_webapp()
        self.init_terminals()
        self.init_signal()
        self.init_server_extensions()

    def cleanup_kernels(self):
        """Shutdown all kernels.
        
        The kernels will shutdown themselves when this process no longer exists,
        but explicit shutdown allows the KernelManagers to cleanup the connection files.
        """
        self.log.info('Shutting down kernels')
        self.kernel_manager.shutdown_all()

    def notebook_info(self):
        "Return the current working directory and the server url information"
        info = self.contents_manager.info_string() + "\n"
        info += "%d active kernels \n" % len(self.kernel_manager._kernels)
        return info + "The IPython Notebook is running at: %s" % self.display_url

    def server_info(self):
        """Return a JSONable dict of information about this server."""
        return {
            'url': self.connection_url,
            'hostname': self.ip if self.ip else 'localhost',
            'port': self.port,
            'secure': bool(self.certfile),
            'base_url': self.base_url,
            'notebook_dir': os.path.abspath(self.notebook_dir),
            'pid': os.getpid()
        }
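        # Example return value (all values illustrative):
        #   {'url': 'http://localhost:8888/', 'hostname': 'localhost', 'port': 8888,
        #    'secure': False, 'base_url': '/', 'notebook_dir': '/home/user/notebooks',
        #    'pid': 12345}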

    def write_server_info_file(self):
        """Write the result of server_info() to the JSON file info_file."""
        with open(self.info_file, 'w') as f:
            json.dump(self.server_info(), f, indent=2)

    def remove_server_info_file(self):
        """Remove the nbserver-<pid>.json file created for this server.
        
        Ignores the error raised when the file has already been removed.
        """
        try:
            os.unlink(self.info_file)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    def start(self):
        """ Start the IPython Notebook server app, after initialization
        
        This method takes no arguments so all configuration and initialization
        must be done prior to calling this method."""
        if self.subapp is not None:
            return self.subapp.start()

        info = self.log.info
        for line in self.notebook_info().split("\n"):
            info(line)
        info(
            "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)."
        )

        self.write_server_info_file()

        if self.open_browser or self.file_to_run:
            try:
                browser = webbrowser.get(self.browser or None)
            except webbrowser.Error as e:
                self.log.warn('No web browser found: %s.' % e)
                browser = None

            if self.file_to_run:
                if not os.path.exists(self.file_to_run):
                    self.log.critical("%s does not exist" % self.file_to_run)
                    self.exit(1)

                relpath = os.path.relpath(self.file_to_run, self.notebook_dir)
                uri = url_path_join('notebooks', *relpath.split(os.sep))
            else:
                uri = 'tree'
            if browser:
                b = lambda: browser.open(
                    url_path_join(self.connection_url, uri), new=2)
                threading.Thread(target=b).start()

        self.io_loop = ioloop.IOLoop.current()
        if sys.platform.startswith('win'):
            # add no-op to wake every 5s
            # to handle signals that may be ignored by the inner loop
            pc = ioloop.PeriodicCallback(lambda: None, 5000)
            pc.start()
        try:
            self.io_loop.start()
        except KeyboardInterrupt:
            info("Interrupted...")
        finally:
            self.cleanup_kernels()
            self.remove_server_info_file()

    def stop(self):
        def _stop():
            self.http_server.stop()
            self.io_loop.stop()

        self.io_loop.add_callback(_stop)
Example no. 30
0
class RMessage(Reference):
    msg_id = Unicode()
    msg_type = Unicode()
    header = Dict()
    parent_header = Dict()
    content = Dict()