Example #1
class Box(DOMWidget):
    """Displays multiple widgets in a group."""
    _view_name = Unicode('BoxView', sync=True)

    # Child widgets in the container.
    # Using a tuple here to force reassignment to update the list.
    # When a proper notifying-list trait exists, that is what should be used here.
    children = Tuple(sync=True, allow_none=False)
    
    _overflow_values = ['visible', 'hidden', 'scroll', 'auto', 'initial', 'inherit', '']
    overflow_x = CaselessStrEnum(
        values=_overflow_values, 
        default_value='', allow_none=False, sync=True, help="""Specifies what
        happens to content that is too large for the rendered region.""")
    overflow_y = CaselessStrEnum(
        values=_overflow_values, 
        default_value='', allow_none=False, sync=True, help="""Specifies what
        happens to content that is too large for the rendered region.""")

    box_style = CaselessStrEnum(
        values=['success', 'info', 'warning', 'danger', ''], 
        default_value='', allow_none=True, sync=True, help="""Use a
        predefined styling for the box.""")

    def __init__(self, children=(), **kwargs):
        kwargs['children'] = children
        super(Box, self).__init__(**kwargs)
        self.on_displayed(Box._fire_children_displayed)

    def _fire_children_displayed(self):
        for child in self.children:
            child._handle_displayed()
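A short usage sketch for the container above. The import path and widget names are assumptions for the legacy IPython.html.widgets package of this era; the point is that `children` is a Tuple trait, so it is updated by reassignment rather than in-place mutation.

from IPython.display import display
from IPython.html.widgets import Box, IntSlider  # assumed legacy import path

a = IntSlider(description='a')
b = IntSlider(description='b')
box = Box(children=(a, b))
display(box)

# The tuple trait forces reassignment; mutating the tuple in place would not sync.
box.children = box.children + (IntSlider(description='c'),)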
Example #2
class DOMWidget(Widget):
    visible = Bool(True, allow_none=True, help="Whether the widget is visible.  False collapses the empty space, while None preserves the empty space.", sync=True)
    _css = Tuple(sync=True, help="CSS property list: (selector, key, value)")
    _dom_classes = Tuple(sync=True, help="DOM classes applied to widget.$el.")
    
    width = CUnicode(sync=True)
    height = CUnicode(sync=True)
    # A default padding of 2.5 px makes the widgets look nice when displayed inline.
    padding = CUnicode("2.5px", sync=True)
    margin = CUnicode(sync=True)

    color = Unicode(sync=True)
    background_color = Unicode(sync=True)
    border_color = Unicode(sync=True)

    border_width = CUnicode(sync=True)
    border_radius = CUnicode(sync=True)
    border_style = CaselessStrEnum(values=[ # http://www.w3schools.com/cssref/pr_border-style.asp
        'none', 
        'hidden', 
        'dotted', 
        'dashed', 
        'solid', 
        'double', 
        'groove', 
        'ridge', 
        'inset', 
        'outset', 
        'initial', 
        'inherit', ''],
        default_value='', sync=True)

    font_style = CaselessStrEnum(values=[ # http://www.w3schools.com/cssref/pr_font_font-style.asp
        'normal', 
        'italic', 
        'oblique', 
        'initial', 
        'inherit', ''], 
        default_value='', sync=True)
    font_weight = CaselessStrEnum(values=[ # http://www.w3schools.com/cssref/pr_font_weight.asp
        'normal', 
        'bold', 
        'bolder', 
        'lighter',
        'initial', 
        'inherit', ''] + [str(100 * (i+1)) for i in range(9)], 
        default_value='', sync=True)
    font_size = CUnicode(sync=True)
    font_family = Unicode(sync=True)

    def __init__(self, *pargs, **kwargs):
        super(DOMWidget, self).__init__(*pargs, **kwargs)

        def _validate_border(name, old, new):
            if new is not None and new != '':
                if name != 'border_width' and not self.border_width:
                    self.border_width = 1
                if name != 'border_style' and self.border_style == '':
                    self.border_style = 'solid'
        self.on_trait_change(_validate_border, ['border_width', 'border_style', 'border_color'])
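The `_validate_border` handler above fills in sensible defaults when only part of the border is specified. A minimal sketch of the effect, assuming the legacy widgets API of this era:

from IPython.html.widgets import Button  # Button is a DOMWidget subclass

btn = Button(description='bordered')
btn.border_color = 'red'   # triggers _validate_border above
# The validator supplies the missing pieces so the border actually renders:
# btn.border_width  -> '1'     (CUnicode casts the int 1)
# btn.border_style  -> 'solid'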
Example #3
class FloatProgress(_BoundedFloat):
    """ Displays a progress bar.

    Parameters
    -----------
    value : float
	position within the range of the progress bar
    min : float
	minimal position of the slider
    max : float
	maximal position of the slider
    step : float
	step of the progress bar
    description : str
	name of the progress bar
    bar_style: {'success', 'info', 'warning', 'danger', ''}, optional
	color of the progress bar, default is '' (blue)
	colors are: 'success'-green, 'info'-light blue, 'warning'-orange, 'danger'-red
"""
    _view_name = Unicode('ProgressView', sync=True)

    bar_style = CaselessStrEnum(
        values=['success', 'info', 'warning', 'danger', ''],
        default_value='',
        allow_none=True,
        sync=True,
        help="""Use a
        predefined styling for the progress bar.""")
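A usage sketch matching the docstring above (import path assumed for this era of the widgets package):

from IPython.display import display
from IPython.html.widgets import FloatProgress  # assumed legacy import path

progress = FloatProgress(value=20.0, min=0.0, max=100.0,
                         description='Loading:', bar_style='info')
display(progress)
progress.value = 75.0  # synced trait updates the bar in the frontend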
Example #4
class ToggleButton(_Bool):
    """Displays a boolean `value` in the form of a toggle button.

       Parameters
       ----------
       value : {True,False}
           value of the toggle button: True-pressed, False-unpressed
       description : str
	   description displayed next to the button
       tooltip: str
           tooltip caption of the toggle button
       icon: str
           font-awesome icon name
"""
    _view_name = Unicode('ToggleButtonView', sync=True)
    tooltip = Unicode(help="Tooltip caption of the toggle button.", sync=True)
    icon = Unicode('', help="Font-awesome icon.", sync=True)

    button_style = CaselessStrEnum(
        values=['primary', 'success', 'info', 'warning', 'danger', ''],
        default_value='',
        allow_none=True,
        sync=True,
        help="""Use a
        predefined styling for the button.""")
Example #5
class InlineBackend(InlineBackendConfig):
    """An object to store configuration of the inline backend."""
    def _config_changed(self, name, old, new):
        # warn on change of renamed config section
        if new.InlineBackendConfig != old.InlineBackendConfig:
            warn("InlineBackendConfig has been renamed to InlineBackend")
        super(InlineBackend, self)._config_changed(name, old, new)

    # The typical default figure size is too large for inline use,
    # so we shrink the figure size to 6x4, and tweak fonts to
    # make that fit.
    rc = Dict(
        {
            'figure.figsize': (6.0, 4.0),
            # play nicely with white background in the Qt and notebook frontend
            'figure.facecolor': 'white',
            'figure.edgecolor': 'white',
            # 12pt labels get cutoff on 6x4 logplots, so use 10pt.
            'font.size': 10,
            # 72 dpi matches SVG/qtconsole
            # this only affects PNG export, as SVG has no dpi setting
            'savefig.dpi': 72,
            # 10pt still needs a little more room on the xlabel:
            'figure.subplot.bottom': .125
        },
        config=True,
        help="""Subset of matplotlib rcParams that should be different for the
        inline backend.""")

    figure_format = CaselessStrEnum(
        ['svg', 'png', 'retina'],
        default_value='png',
        config=True,
        help="The image format for figures with the inline backend.")

    def _figure_format_changed(self, name, old, new):
        if self.shell is None:
            return
        else:
            select_figure_format(self.shell, new)

    close_figures = Bool(True,
                         config=True,
                         help="""Close all figures at the end of each cell.
        
        When True, ensures that each cell starts with no active figures, but it
        also means that one must keep track of references in order to edit or
        redraw figures in subsequent cells. This mode is ideal for the notebook,
        where residual plots from other cells might be surprising.
        
        When False, one must call figure() to create new figures. This means
        that gcf() and getfigs() can reference figures created in other cells,
        and the active figure can continue to be edited with pylab/pyplot
        methods that reference the current active figure. This mode facilitates
        iterative editing of figures, and behaves most consistently with
        other matplotlib backends, but figure barriers between cells must
        be explicit.
        """)

    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
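These traits are all `config=True`, so they can be set from an IPython configuration file. A sketch, assuming the standard profile config mechanism:

# ipython_kernel_config.py (or ipython_config.py)
c = get_config()
c.InlineBackend.figure_format = 'retina'
c.InlineBackend.close_figures = False
c.InlineBackend.rc = {'figure.figsize': (8.0, 5.0), 'font.size': 12}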
Example #6
class FloatRangeSlider(_BoundedFloatRange):
    """ Slider/trackbar for displaying a floating value range (within the specified range of values).

	Parameters
	----------
	value : float tuple
	    range of the slider displayed
	min : float
	    minimal position of the slider
	max : float
	    maximal position of the slider
	step : float
	    step of the trackbar
	description : str
	    name of the slider
	orientation : {'vertical', 'horizontal}, optional
            default is horizontal
	readout : {True, False}, optional
	    default is True, display the current value of the slider next to it		
	slider_color : str Unicode color code (eg. '#C13535'), optional 
	    color of the slider 
	color : str Unicode color code (eg. '#C13535'), optional
	    color of the value displayed (if readout == True)
    """
    _view_name = Unicode('FloatSliderView', sync=True)
    orientation = CaselessStrEnum(values=['horizontal', 'vertical'],
                                  default_value='horizontal',
                                  help="Vertical or horizontal.",
                                  sync=True)
    _range = Bool(True, help="Display a range selector", sync=True)
    readout = Bool(True,
                   help="Display the current value of the slider next to it.",
                   sync=True)
    slider_color = Unicode(sync=True)
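Usage sketch for the range slider above; `value` is a (low, high) tuple, and the import path is an assumption for this era:

from IPython.display import display
from IPython.html.widgets import FloatRangeSlider  # assumed legacy import path

rng = FloatRangeSlider(value=(2.5, 7.5), min=0.0, max=10.0, step=0.5,
                       description='range', readout=True,
                       orientation='horizontal')
display(rng)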
Example #7
class InlineBackendConfig(SingletonConfigurable):
    """An object to store configuration of the inline backend."""

    # The typical default figure size is too large for inline use,
    # so we shrink the figure size to 6x4, and tweak fonts to
    # make that fit.  This is configurable via Global.pylab_inline_rc,
    # or rather it will be once the zmq kernel is hooked up to
    # the config system.
    rc = Dict(
        {
            'figure.figsize': (6.0, 4.0),
            # 12pt labels get cutoff on 6x4 logplots, so use 10pt.
            'font.size': 10,
            # 10pt still needs a little more room on the xlabel:
            'figure.subplot.bottom': .125
        },
        config=True,
        help="""Subset of matplotlib rcParams that should be different for the
        inline backend.""")
    figure_format = CaselessStrEnum(
        ['svg', 'png'],
        default_value='png',
        config=True,
        help="The image format for figures with the inline backend.")

    def _figure_format_changed(self, name, old, new):
        if self.shell is None:
            return
        else:
            select_figure_format(self.shell, new)

    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
Example #8
class SelectizeWidget(Select):
    _view_name = Unicode('SelectizeView', sync=True)
    theme = CaselessStrEnum(
        values=['default', 'legacy', 'bootstrap2', 'bootstrap3'],
        default_value='default',
        sync=True,
        help="""Use a them styling for the SelectizeWidget.""")
    disabled = Bool(False, help="Enable or disable user changes", sync=True)
    description = Unicode(
        help="Description of the value this widget represents", sync=True)

    def __init__(self, *args, **kwargs):
        Select.__init__(self, *args, **kwargs)
        self.value = ','.join(self.values)

    def _value_name_changed(self, name, old, new):
        """Called when the value name has been changed
           (typically by the frontend)."""
        if self.value_lock.acquire(False):
            try:
                self.value_name = self.value = new
            finally:
                self.value_lock.release()

    def _value_changed(self, name, old, new):
        """Called when value has been changed"""
        if self.value_lock.acquire(False):
            try:
                self.value_name = self.value = new
            finally:
                self.value_lock.release()
Example #9
class IntProgress(_BoundedInt):
    """Progress bar that represents a int bounded by a minimum and maximum value."""
    _view_name = Unicode('ProgressView', sync=True)

    bar_style = CaselessStrEnum(
        values=['success', 'info', 'warning', 'danger', ''], 
        default_value='', allow_none=True, sync=True, help="""Use a
        predefined styling for the progress bar.""")
Example #10
class IntRangeSlider(_BoundedIntRange):
    _view_name = Unicode('IntSliderView', sync=True)
    orientation = CaselessStrEnum(values=['horizontal', 'vertical'], 
        default_value='horizontal', allow_none=False, 
        help="Vertical or horizontal.", sync=True)
    _range = Bool(True, help="Display a range selector", sync=True)
    readout = Bool(True, help="Display the current value of the slider next to it.", sync=True)
    slider_color = Unicode(sync=True)
Example #11
class FlexBox(Box):
    """Displays multiple widgets using the flexible box model."""
    _view_name = Unicode('FlexBoxView', sync=True)
    orientation = CaselessStrEnum(values=['vertical', 'horizontal'], default_value='vertical', sync=True)
    flex = Int(0, sync=True, help="""Specify the flexible-ness of the model.""")
    def _flex_changed(self, name, old, new):
        new = min(max(0, new), 2)
        if self.flex != new:
            self.flex = new

    _locations = ['start', 'center', 'end', 'baseline', 'stretch']
    pack = CaselessStrEnum(
        values=_locations, 
        default_value='start', allow_none=False, sync=True)
    align = CaselessStrEnum(
        values=_locations, 
        default_value='start', allow_none=False, sync=True)
Example #12
class Dropdown(_Selection):
    """Allows you to select a single item from a dropdown."""
    _view_name = Unicode('DropdownView', sync=True)

    button_style = CaselessStrEnum(
        values=['primary', 'success', 'info', 'warning', 'danger', ''], 
        default_value='', allow_none=True, sync=True, help="""Use a
        predefined styling for the buttons.""")
Example #13
class IntSlider(_BoundedInt):
    """Slider widget that represents a int bounded by a minimum and maximum value."""
    _view_name = Unicode('IntSliderView', sync=True)
    orientation = CaselessStrEnum(values=['horizontal', 'vertical'], 
        default_value='horizontal', help="Vertical or horizontal.", sync=True)
    _range = Bool(False, help="Display a range selector", sync=True)
    readout = Bool(True, help="Display the current value of the slider next to it.", sync=True)
    slider_color = Color(None, allow_none=True, sync=True)
Example #14
class ToggleButtons(_Selection):
    """Group of toggle buttons that represent an enumeration.  Only one toggle
    button can be toggled at any point in time.""" 
    _view_name = Unicode('ToggleButtonsView', sync=True)

    button_style = CaselessStrEnum(
        values=['primary', 'success', 'info', 'warning', 'danger', ''], 
        default_value='', allow_none=True, sync=True, help="""Use a
        predefined styling for the buttons.""")
Example #15
class Button(DOMWidget):
    """Button widget.
       This widget has an `on_click` method that allows you to listen for the 
       user clicking on the button.  The click event itself is stateless.

       Parameters
       ----------
       description : str
           description displayed next to the button
       tooltip: str
           tooltip caption of the toggle button
       icon: str
           font-awesome icon name
    """
    _view_name = Unicode('ButtonView', sync=True)

    # Keys
    description = Unicode('', help="Button label.", sync=True)
    tooltip = Unicode(help="Tooltip caption of the button.", sync=True)
    disabled = Bool(False, help="Enable or disable user changes.", sync=True)
    icon = Unicode('', help="Font-awesome icon.", sync=True)

    button_style = CaselessStrEnum(
        values=['primary', 'success', 'info', 'warning', 'danger', ''],
        default_value='',
        allow_none=True,
        sync=True,
        help="""Use a
        predefined styling for the button.""")

    def __init__(self, **kwargs):
        """Constructor"""
        super(Button, self).__init__(**kwargs)
        self._click_handlers = CallbackDispatcher()
        self.on_msg(self._handle_button_msg)

    def on_click(self, callback, remove=False):
        """Register a callback to execute when the button is clicked.

        The callback will be called with one argument,
        the clicked button widget instance.

        Parameters
        ----------
        remove : bool (optional)
            Set to true to remove the callback from the list of callbacks."""
        self._click_handlers.register_callback(callback, remove=remove)

    def _handle_button_msg(self, _, content):
        """Handle a msg from the front-end.

        Parameters
        ----------
        content: dict
            Content of the msg."""
        if content.get('event', '') == 'click':
            self._click_handlers(self)
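A sketch of the `on_click` registration described in the docstring above (import paths assumed for this era):

from IPython.display import display
from IPython.html.widgets import Button  # assumed legacy import path

button = Button(description='Click me', tooltip='fires on_click',
                button_style='primary')

def handle_click(sender):
    # sender is the Button instance that was clicked
    print('clicked: ' + sender.description)

button.on_click(handle_click)
display(button)
# The handler can be unregistered later with:
# button.on_click(handle_click, remove=True)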
Example #16
class FloatProgress(_BoundedFloat):
    _view_name = Unicode('ProgressView', sync=True)

    bar_style = CaselessStrEnum(
        values=['success', 'info', 'warning', 'danger', ''],
        default_value='',
        allow_none=True,
        sync=True,
        help="""Use a
        predefined styling for the progress bar.""")
Example #17
class ToggleButton(_Bool):
    """Displays a boolean `value`."""

    _view_name = Unicode('ToggleButtonView', sync=True)

    button_style = CaselessStrEnum(
        values=['primary', 'success', 'info', 'warning', 'danger', ''],
        default_value='',
        allow_none=True,
        sync=True,
        help="""Use a
        predefined styling for the button.""")
Example #18
class IPKernelApp(KernelApp, InteractiveShellApp):
    name = 'ipkernel'

    aliases = Dict(aliases)
    flags = Dict(flags)
    classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
    # configurables
    pylab = CaselessStrEnum(['tk', 'qt', 'wx', 'gtk', 'osx', 'inline', 'auto'],
        config=True,
        help="""Pre-load matplotlib and numpy for interactive use,
        selecting a particular matplotlib backend and loop integration.
        """
    )
    
    @catch_config_error
    def initialize(self, argv=None):
        super(IPKernelApp, self).initialize(argv)
        self.init_shell()
        self.init_extensions()
        self.init_code()

    def init_kernel(self):
        kernel_factory = Kernel

        if self.pylab:
            gui, backend = pylabtools.find_gui_and_backend(self.pylab)

        kernel = kernel_factory(config=self.config, session=self.session,
                                shell_socket=self.shell_socket,
                                iopub_socket=self.iopub_socket,
                                stdin_socket=self.stdin_socket,
                                log=self.log,
        )
        self.kernel = kernel
        kernel.record_ports(self.ports)

        if self.pylab:
            kernel.shell.enable_pylab(gui, import_all=self.pylab_import_all)

    def init_shell(self):
        self.shell = self.kernel.shell
Example #19
class NbconvertApp(Application):

    fmt = CaselessStrEnum(converters.keys(),
                          default_value='rst',
                          config=True,
                          help="Supported conversion format")

    exclude = List([],
                   config=True,
                   help='list of cells to exclude while converting')

    aliases = {
        'format': 'NbconvertApp.fmt',
        'exclude': 'NbconvertApp.exclude',
        'highlight': 'Converter.highlight_source',
        'preamble': 'Converter.preamble',
    }

    def __init__(self, **kwargs):
        super(NbconvertApp, self).__init__(**kwargs)
        # ensure these are registered
        self.classes.insert(0, Converter)
        self.classes.insert(0, ConverterRST)
        self.classes.insert(0, ConverterMarkdown)
        self.classes.insert(0, ConverterBloggerHTML)
        self.classes.insert(0, ConverterLaTeX)
        self.classes.insert(0, ConverterPy)

    def initialize(self, argv=None):
        self.parse_command_line(argv)
        cl_config = self.config
        self.update_config(cl_config)

    def run(self):
        """Convert a notebook in one step"""
        ConverterClass = converters[self.fmt]
        infile = (self.extra_args or [None])[0]
        converter = ConverterClass(infile=infile, config=self.config)
        converter.render()
Example #20
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp):
    name = 'ipkernel'
    aliases = Dict(kernel_aliases)
    flags = Dict(kernel_flags)
    classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
    # the kernel class, as an importstring
    kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel')
    kernel = Any()
    poller = Any(
    )  # don't restrict this even though current pollers are all Threads
    heartbeat = Instance(Heartbeat)
    session = Instance('IPython.kernel.zmq.session.Session')
    ports = Dict()

    # inherit config file name from parent:
    parent_appname = Unicode(config=True)

    def _parent_appname_changed(self, name, old, new):
        if self.config_file_specified:
            # it was manually specified, ignore
            return
        self.config_file_name = new.replace('-', '_') + u'_config.py'
        # don't let this count as specifying the config file
        self.config_file_specified = False

    # connection info:
    transport = CaselessStrEnum(['tcp', 'ipc'],
                                default_value='tcp',
                                config=True)
    ip = Unicode(
        config=True,
        help="Set the IP or interface on which the kernel will listen.")

    def _ip_default(self):
        if self.transport == 'ipc':
            if self.connection_file:
                return os.path.splitext(self.abs_connection_file)[0] + '-ipc'
            else:
                return 'kernel-ipc'
        else:
            return LOCALHOST

    hb_port = Integer(0,
                      config=True,
                      help="set the heartbeat port [default: random]")
    shell_port = Integer(0,
                         config=True,
                         help="set the shell (ROUTER) port [default: random]")
    iopub_port = Integer(0,
                         config=True,
                         help="set the iopub (PUB) port [default: random]")
    stdin_port = Integer(0,
                         config=True,
                         help="set the stdin (DEALER) port [default: random]")
    connection_file = Unicode(
        '',
        config=True,
        help=
        """JSON file in which to store connection info [default: kernel-<pid>.json]
    
    This file will contain the IP, ports, and authentication key needed to connect
    clients to this kernel. By default, this file will be created in the security dir
    of the current profile, but can be specified by absolute path.
    """)

    @property
    def abs_connection_file(self):
        if os.path.basename(self.connection_file) == self.connection_file:
            return os.path.join(self.profile_dir.security_dir,
                                self.connection_file)
        else:
            return self.connection_file

    # streams, etc.
    no_stdout = Bool(False,
                     config=True,
                     help="redirect stdout to the null device")
    no_stderr = Bool(False,
                     config=True,
                     help="redirect stderr to the null device")
    outstream_class = DottedObjectName(
        'IPython.kernel.zmq.iostream.OutStream',
        config=True,
        help="The importstring for the OutStream factory")
    displayhook_class = DottedObjectName(
        'IPython.kernel.zmq.displayhook.ZMQDisplayHook',
        config=True,
        help="The importstring for the DisplayHook factory")

    # polling
    parent = Integer(
        0,
        config=True,
        help="""kill this process if its parent dies.  On Windows, the argument
        specifies the HANDLE of the parent process, otherwise it is simply boolean.
        """)
    interrupt = Integer(0,
                        config=True,
                        help="""ONLY USED ON WINDOWS
        Interrupt this process when the parent is signaled.
        """)

    def init_crash_handler(self):
        # Install minimal exception handling
        sys.excepthook = FormattedTB(mode='Verbose',
                                     color_scheme='NoColor',
                                     ostream=sys.__stdout__)

    def init_poller(self):
        if sys.platform == 'win32':
            if self.interrupt or self.parent:
                self.poller = ParentPollerWindows(self.interrupt, self.parent)
        elif self.parent:
            self.poller = ParentPollerUnix()

    def _bind_socket(self, s, port):
        iface = '%s://%s' % (self.transport, self.ip)
        if self.transport == 'tcp':
            if port <= 0:
                port = s.bind_to_random_port(iface)
            else:
                s.bind("tcp://%s:%i" % (self.ip, port))
        elif self.transport == 'ipc':
            if port <= 0:
                port = 1
                path = "%s-%i" % (self.ip, port)
                while os.path.exists(path):
                    port = port + 1
                    path = "%s-%i" % (self.ip, port)
            else:
                path = "%s-%i" % (self.ip, port)
            s.bind("ipc://%s" % path)
        return port

    def load_connection_file(self):
        """load ip/port/hmac config from JSON connection file"""
        try:
            fname = filefind(self.connection_file,
                             ['.', self.profile_dir.security_dir])
        except IOError:
            self.log.debug("Connection file not found: %s",
                           self.connection_file)
            # This means I own it, so I will clean it up:
            atexit.register(self.cleanup_connection_file)
            return
        self.log.debug(u"Loading connection file %s", fname)
        with open(fname) as f:
            s = f.read()
        cfg = json.loads(s)
        self.transport = cfg.get('transport', self.transport)
        if self.ip == self._ip_default() and 'ip' in cfg:
            # not overridden by config or cl_args
            self.ip = cfg['ip']
        for channel in ('hb', 'shell', 'iopub', 'stdin'):
            name = channel + '_port'
            if getattr(self, name) == 0 and name in cfg:
                # not overridden by config or cl_args
                setattr(self, name, cfg[name])
        if 'key' in cfg:
            self.config.Session.key = str_to_bytes(cfg['key'])

    def write_connection_file(self):
        """write connection info to JSON file"""
        cf = self.abs_connection_file
        self.log.debug("Writing connection file: %s", cf)
        write_connection_file(cf,
                              ip=self.ip,
                              key=self.session.key,
                              transport=self.transport,
                              shell_port=self.shell_port,
                              stdin_port=self.stdin_port,
                              hb_port=self.hb_port,
                              iopub_port=self.iopub_port)

    def cleanup_connection_file(self):
        cf = self.abs_connection_file
        self.log.debug("Cleaning up connection file: %s", cf)
        try:
            os.remove(cf)
        except (IOError, OSError):
            pass

        self.cleanup_ipc_files()

    def cleanup_ipc_files(self):
        """cleanup ipc files if we wrote them"""
        if self.transport != 'ipc':
            return
        for port in (self.shell_port, self.iopub_port, self.stdin_port,
                     self.hb_port):
            ipcfile = "%s-%i" % (self.ip, port)
            try:
                os.remove(ipcfile)
            except (IOError, OSError):
                pass

    def init_connection_file(self):
        if not self.connection_file:
            self.connection_file = "kernel-%s.json" % os.getpid()
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r",
                           self.connection_file,
                           exc_info=True)
            self.exit(1)

    def init_sockets(self):
        # Create a context, a session, and the kernel sockets.
        self.log.info("Starting the kernel at pid: %i", os.getpid())
        context = zmq.Context.instance()
        # Uncomment this to try closing the context.
        # atexit.register(context.term)

        self.shell_socket = context.socket(zmq.ROUTER)
        self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
        self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)

        self.iopub_socket = context.socket(zmq.PUB)
        self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
        self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)

        self.stdin_socket = context.socket(zmq.ROUTER)
        self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
        self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)

    def init_heartbeat(self):
        """start the heart beating"""
        # heartbeat doesn't share context, because it mustn't be blocked
        # by the GIL, which is accessed by libzmq when freeing zero-copy messages
        hb_ctx = zmq.Context()
        self.heartbeat = Heartbeat(hb_ctx,
                                   (self.transport, self.ip, self.hb_port))
        self.hb_port = self.heartbeat.port
        self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
        self.heartbeat.start()

        # Helper to make it easier to connect to an existing kernel.
        # set log-level to critical, to make sure it is output
        self.log.critical("To connect another client to this kernel, use:")

    def log_connection_info(self):
        """display connection info, and store ports"""
        basename = os.path.basename(self.connection_file)
        if basename == self.connection_file or \
            os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
            # use shortname
            tail = basename
            if self.profile != 'default':
                tail += " --profile %s" % self.profile
        else:
            tail = self.connection_file
        self.log.critical("--existing %s", tail)

        self.ports = dict(shell=self.shell_port,
                          iopub=self.iopub_port,
                          stdin=self.stdin_port,
                          hb=self.hb_port)

    def init_session(self):
        """create our session object"""
        default_secure(self.config)
        self.session = Session(config=self.config, username=u'kernel')

    def init_blackhole(self):
        """redirects stdout/stderr to devnull if necessary"""
        if self.no_stdout or self.no_stderr:
            blackhole = open(os.devnull, 'w')
            if self.no_stdout:
                sys.stdout = sys.__stdout__ = blackhole
            if self.no_stderr:
                sys.stderr = sys.__stderr__ = blackhole

    def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            outstream_factory = import_item(str(self.outstream_class))
            sys.stdout = outstream_factory(self.session, self.iopub_socket,
                                           u'stdout')
            sys.stderr = outstream_factory(self.session, self.iopub_socket,
                                           u'stderr')
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            sys.displayhook = displayhook_factory(self.session,
                                                  self.iopub_socket)

    def init_signal(self):
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def init_kernel(self):
        """Create the Kernel object itself"""
        shell_stream = ZMQStream(self.shell_socket)

        kernel = Kernel(
            config=self.config,
            session=self.session,
            shell_streams=[shell_stream],
            iopub_socket=self.iopub_socket,
            stdin_socket=self.stdin_socket,
            log=self.log,
            profile_dir=self.profile_dir,
        )
        kernel.record_ports(self.ports)
        self.kernel = kernel

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""

        # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
        # to ensure that any exception is printed straight to stderr.
        # Normally _showtraceback associates the reply with an execution,
        # which means frontends will never draw it, as this exception
        # is not associated with any execute request.

        shell = self.shell
        _showtraceback = shell._showtraceback
        try:
            # replace pyerr-sending traceback with stderr
            def print_tb(etype, evalue, stb):
                print("GUI event loop or pylab initialization failed",
                      file=io.stderr)
                print(shell.InteractiveTB.stb2text(stb), file=io.stderr)

            shell._showtraceback = print_tb
            InteractiveShellApp.init_gui_pylab(self)
        finally:
            shell._showtraceback = _showtraceback

    def init_shell(self):
        self.shell = self.kernel.shell
        self.shell.configurables.append(self)

    @catch_config_error
    def initialize(self, argv=None):
        super(IPKernelApp, self).initialize(argv)
        self.init_blackhole()
        self.init_connection_file()
        self.init_session()
        self.init_poller()
        self.init_sockets()
        self.init_heartbeat()
        # writing/displaying connection info must be *after* init_sockets/heartbeat
        self.log_connection_info()
        self.write_connection_file()
        self.init_io()
        self.init_signal()
        self.init_kernel()
        # shell init steps
        self.init_path()
        self.init_shell()
        self.init_gui_pylab()
        self.init_extensions()
        self.init_code()
        # flush stdout/stderr, so that anything written to these streams during
        # initialization do not get associated with the first execution request
        sys.stdout.flush()
        sys.stderr.flush()

    def start(self):
        if self.poller is not None:
            self.poller.start()
        self.kernel.start()
        try:
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            pass
Example #21
class IPKernelApp(KernelApp, InteractiveShellApp):
    name = 'ipkernel'

    aliases = Dict(aliases)
    flags = Dict(flags)
    classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
    # configurables
    pylab = CaselessStrEnum(
        ['tk', 'qt', 'wx', 'gtk', 'osx', 'inline', 'auto'],
        config=True,
        help="""Pre-load matplotlib and numpy for interactive use,
        selecting a particular matplotlib backend and loop integration.
        """)

    @catch_config_error
    def initialize(self, argv=None):
        super(IPKernelApp, self).initialize(argv)
        self.init_shell()
        self.init_extensions()
        self.init_code()

    def init_kernel(self):

        kernel = Kernel(
            config=self.config,
            session=self.session,
            shell_socket=self.shell_socket,
            iopub_socket=self.iopub_socket,
            stdin_socket=self.stdin_socket,
            log=self.log,
            profile_dir=self.profile_dir,
        )
        self.kernel = kernel
        kernel.record_ports(self.ports)
        shell = kernel.shell
        if self.pylab:
            try:
                gui, backend = pylabtools.find_gui_and_backend(self.pylab)
                shell.enable_pylab(gui, import_all=self.pylab_import_all)
            except Exception:
                self.log.error("Pylab initialization failed", exc_info=True)
                # print exception straight to stdout, because normally
                # _showtraceback associates the reply with an execution,
                # which means frontends will never draw it, as this exception
                # is not associated with any execute request.

                # replace pyerr-sending traceback with stdout
                _showtraceback = shell._showtraceback

                def print_tb(etype, evalue, stb):
                    print(
                        "Error initializing pylab, pylab mode will not "
                        "be active",
                        file=io.stderr)
                    print(shell.InteractiveTB.stb2text(stb), file=io.stdout)

                shell._showtraceback = print_tb

                # send the traceback over stdout
                shell.showtraceback(tb_offset=0)

                # restore proper _showtraceback method
                shell._showtraceback = _showtraceback

    def init_shell(self):
        self.shell = self.kernel.shell
        self.shell.configurables.append(self)
Example #22
class KernelManager(HasTraits):
    """ Manages a kernel for a frontend.

    The SUB channel is for the frontend to receive messages published by the
    kernel.

    The REQ channel is for the frontend to make requests of the kernel.

    The REP channel is for the kernel to request stdin (raw_input) from the
    frontend.
    """
    # config object for passing to child configurables
    config = Instance(Config)

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)

    def _context_default(self):
        return zmq.Context.instance()

    # The Session to use for communication with the kernel.
    session = Instance(Session)

    # The kernel process with which the KernelManager is communicating.
    kernel = Instance(Popen)

    # The addresses for the communication channels.
    connection_file = Unicode('')

    transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp')

    ip = Unicode(LOCALHOST)

    def _ip_changed(self, name, old, new):
        if new == '*':
            self.ip = '0.0.0.0'

    shell_port = Integer(0)
    iopub_port = Integer(0)
    stdin_port = Integer(0)
    hb_port = Integer(0)

    # The classes to use for the various channels.
    shell_channel_class = Type(ShellSocketChannel)
    sub_channel_class = Type(SubSocketChannel)
    stdin_channel_class = Type(StdInSocketChannel)
    hb_channel_class = Type(HBSocketChannel)

    # Protected traits.
    _launch_args = Any
    _shell_channel = Any
    _sub_channel = Any
    _stdin_channel = Any
    _hb_channel = Any
    _connection_file_written = Bool(False)

    def __init__(self, **kwargs):
        super(KernelManager, self).__init__(**kwargs)
        if self.session is None:
            self.session = Session(config=self.config)

    def __del__(self):
        self.cleanup_connection_file()

    #--------------------------------------------------------------------------
    # Channel management methods:
    #--------------------------------------------------------------------------

    def start_channels(self, shell=True, sub=True, stdin=True, hb=True):
        """Starts the channels for this kernel.

        This will create the channels if they do not exist and then start
        them. If port numbers of 0 are being used (random ports) then you
        must first call :meth:`start_kernel`. If the channels have been
        stopped and you call this, :class:`RuntimeError` will be raised.
        """
        if shell:
            self.shell_channel.start()
        if sub:
            self.sub_channel.start()
        if stdin:
            self.stdin_channel.start()
            self.shell_channel.allow_stdin = True
        else:
            self.shell_channel.allow_stdin = False
        if hb:
            self.hb_channel.start()

    def stop_channels(self):
        """Stops all the running channels for this kernel.
        """
        if self.shell_channel.is_alive():
            self.shell_channel.stop()
        if self.sub_channel.is_alive():
            self.sub_channel.stop()
        if self.stdin_channel.is_alive():
            self.stdin_channel.stop()
        if self.hb_channel.is_alive():
            self.hb_channel.stop()

    @property
    def channels_running(self):
        """Are any of the channels created and running?"""
        return (self.shell_channel.is_alive() or self.sub_channel.is_alive()
                or self.stdin_channel.is_alive() or self.hb_channel.is_alive())

    #--------------------------------------------------------------------------
    # Kernel process management methods:
    #--------------------------------------------------------------------------

    def cleanup_connection_file(self):
        """cleanup connection file *if we wrote it*

        Will not raise if the connection file was already removed somehow.
        """
        if self._connection_file_written:
            # cleanup connection files on full shutdown of kernel we started
            self._connection_file_written = False
            try:
                os.remove(self.connection_file)
            except (IOError, OSError):
                pass

            self._cleanup_ipc_files()

    def _cleanup_ipc_files(self):
        """cleanup ipc files if we wrote them"""
        if self.transport != 'ipc':
            return
        for port in (self.shell_port, self.iopub_port, self.stdin_port,
                     self.hb_port):
            ipcfile = "%s-%i" % (self.ip, port)
            try:
                os.remove(ipcfile)
            except (IOError, OSError):
                pass

    def load_connection_file(self):
        """load connection info from JSON dict in self.connection_file"""
        with open(self.connection_file) as f:
            cfg = json.loads(f.read())

        from pprint import pprint
        pprint(cfg)
        self.transport = cfg.get('transport', 'tcp')
        self.ip = cfg['ip']
        self.shell_port = cfg['shell_port']
        self.stdin_port = cfg['stdin_port']
        self.iopub_port = cfg['iopub_port']
        self.hb_port = cfg['hb_port']
        self.session.key = str_to_bytes(cfg['key'])

    def write_connection_file(self):
        """write connection info to JSON dict in self.connection_file"""
        if self._connection_file_written:
            return
        self.connection_file, cfg = write_connection_file(
            self.connection_file,
            transport=self.transport,
            ip=self.ip,
            key=self.session.key,
            stdin_port=self.stdin_port,
            iopub_port=self.iopub_port,
            shell_port=self.shell_port,
            hb_port=self.hb_port)
        # write_connection_file also sets default ports:
        self.shell_port = cfg['shell_port']
        self.stdin_port = cfg['stdin_port']
        self.iopub_port = cfg['iopub_port']
        self.hb_port = cfg['hb_port']

        self._connection_file_written = True

    def start_kernel(self, **kw):
        """Starts a kernel process and configures the manager to use it.

        If random ports (port=0) are being used, this method must be called
        before the channels are created.

        Parameters
        ----------
        launcher : callable, optional (default None)
             A custom function for launching the kernel process (generally a
             wrapper around ``entry_point.base_launch_kernel``). In most cases,
             it should not be necessary to use this parameter.

        **kw : optional
             See respective options for IPython and Python kernels.
        """
        if self.transport == 'tcp' and self.ip not in LOCAL_IPS:
            raise RuntimeError(
                "Can only launch a kernel on a local interface. "
                "Make sure that the '*_address' attributes are "
                "configured properly. "
                "Currently valid addresses are: %s" % LOCAL_IPS)

        # write connection file / get default ports
        self.write_connection_file()

        self._launch_args = kw.copy()
        launch_kernel = kw.pop('launcher', None)
        if launch_kernel is None:
            # from ipkernel import launch_kernel
            from ext_entry_point import launch_kernel
        self.kernel = launch_kernel(fname=self.connection_file, **kw)

    def shutdown_kernel(self, restart=False):
        """ Attempts to the stop the kernel process cleanly.

        If the kernel cannot be stopped and the kernel is local, it is killed.
        """
        # FIXME: Shutdown does not work on Windows due to ZMQ errors!
        if sys.platform == 'win32':
            self.kill_kernel()
            return

        # Pause the heart beat channel if it exists.
        if self._hb_channel is not None:
            self._hb_channel.pause()

        # Don't send any additional kernel kill messages immediately, to give
        # the kernel a chance to properly execute shutdown actions. Wait for at
        # most 1s, checking every 0.1s.
        self.shell_channel.shutdown(restart=restart)
        for i in range(10):
            if self.is_alive:
                time.sleep(0.1)
            else:
                break
        else:
            # OK, we've waited long enough.
            if self.has_kernel:
                self.kill_kernel()

        if not restart and self._connection_file_written:
            # cleanup connection files on full shutdown of kernel we started
            self._connection_file_written = False
            try:
                os.remove(self.connection_file)
            except IOError:
                pass

    def restart_kernel(self, now=False, **kw):
        """Restarts a kernel with the arguments that were used to launch it.

        If the old kernel was launched with random ports, the same ports will be
        used for the new kernel.

        Parameters
        ----------
        now : bool, optional
            If True, the kernel is forcefully restarted *immediately*, without
            having a chance to do any cleanup action.  Otherwise the kernel is
            given 1s to clean up before a forceful restart is issued.

            In all cases the kernel is restarted, the only difference is whether
            it is given a chance to perform a clean shutdown or not.

        **kw : optional
            Any options specified here will replace those used to launch the
            kernel.
        """
        if self._launch_args is None:
            raise RuntimeError("Cannot restart the kernel. "
                               "No previous call to 'start_kernel'.")
        else:
            # Stop currently running kernel.
            if self.has_kernel:
                if now:
                    self.kill_kernel()
                else:
                    self.shutdown_kernel(restart=True)

            # Start new kernel.
            self._launch_args.update(kw)
            self.start_kernel(**self._launch_args)

            # FIXME: Messages get dropped in Windows due to probable ZMQ bug
            # unless there is some delay here.
            if sys.platform == 'win32':
                time.sleep(0.2)

    @property
    def has_kernel(self):
        """Returns whether a kernel process has been specified for the kernel
        manager.
        """
        return self.kernel is not None

    def kill_kernel(self):
        """ Kill the running kernel.

        This method blocks until the kernel process has terminated.
        """
        if self.has_kernel:
            # Pause the heart beat channel if it exists.
            if self._hb_channel is not None:
                self._hb_channel.pause()

            # Signal the kernel to terminate (sends SIGKILL on Unix and calls
            # TerminateProcess() on Win32).
            try:
                self.kernel.kill()
            except OSError as e:
                # In Windows, we will get an Access Denied error if the process
                # has already terminated. Ignore it.
                if sys.platform == 'win32':
                    if e.winerror != 5:
                        raise
                # On Unix, we may get an ESRCH error if the process has already
                # terminated. Ignore it.
                else:
                    from errno import ESRCH
                    if e.errno != ESRCH:
                        raise

            # Block until the kernel terminates.
            self.kernel.wait()
            self.kernel = None
        else:
            raise RuntimeError("Cannot kill kernel. No kernel is running!")

    def interrupt_kernel(self):
        """ Interrupts the kernel.

        Unlike ``signal_kernel``, this operation is well supported on all
        platforms.
        """
        if self.has_kernel:
            if sys.platform == 'win32':
                from parentpoller import ParentPollerWindows as Poller
                Poller.send_interrupt(self.kernel.win32_interrupt_event)
            else:
                self.kernel.send_signal(signal.SIGINT)
        else:
            raise RuntimeError(
                "Cannot interrupt kernel. No kernel is running!")

    def signal_kernel(self, signum):
        """ Sends a signal to the kernel.

        Note that since only SIGTERM is supported on Windows, this function is
        only useful on Unix systems.
        """
        if self.has_kernel:
            self.kernel.send_signal(signum)
        else:
            raise RuntimeError("Cannot signal kernel. No kernel is running!")

    @property
    def is_alive(self):
        """Is the kernel process still running?"""
        if self.has_kernel:
            if self.kernel.poll() is None:
                return True
            else:
                return False
        elif self._hb_channel is not None:
            # We didn't start the kernel with this KernelManager so we
            # use the heartbeat.
            return self._hb_channel.is_beating()
        else:
            # no heartbeat and not local, we can't tell if it's running,
            # so naively return True
            return True

    #--------------------------------------------------------------------------
    # Channels used for communication with the kernel:
    #--------------------------------------------------------------------------

    def _make_url(self, port):
        """make a zmq url with a port"""
        if self.transport == 'tcp':
            return "tcp://%s:%i" % (self.ip, port)
        else:
            return "%s://%s-%s" % (self.transport, self.ip, port)

    @property
    def shell_channel(self):
        """Get the REQ socket channel object to make requests of the kernel."""
        if self._shell_channel is None:
            self._shell_channel = self.shell_channel_class(
                self.context,
                self.session,
                self._make_url(self.shell_port),
            )
        return self._shell_channel

    @property
    def sub_channel(self):
        """Get the SUB socket channel object."""
        if self._sub_channel is None:
            self._sub_channel = self.sub_channel_class(
                self.context,
                self.session,
                self._make_url(self.iopub_port),
            )
        return self._sub_channel

    @property
    def stdin_channel(self):
        """Get the REP socket channel object to handle stdin (raw_input)."""
        if self._stdin_channel is None:
            self._stdin_channel = self.stdin_channel_class(
                self.context,
                self.session,
                self._make_url(self.stdin_port),
            )
        return self._stdin_channel

    @property
    def hb_channel(self):
        """Get the heartbeat socket channel object to check that the
        kernel is alive."""
        if self._hb_channel is None:
            self._hb_channel = self.hb_channel_class(
                self.context,
                self.session,
                self._make_url(self.hb_port),
            )
        return self._hb_channel
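A hypothetical driver for the manager above. With random ports (the default of 0), `start_kernel` must run before the channels are created, as noted in the `start_channels` docstring; the launcher itself comes from the external entry point imported inside `start_kernel`:

km = KernelManager()
km.start_kernel()        # writes the connection file and launches the kernel process
km.start_channels()      # shell/sub/stdin/hb channels connect to the chosen ports

# ... interact with the kernel via km.shell_channel / km.sub_channel ...

km.stop_channels()
km.shutdown_kernel()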
Example #23
class ExecutionContext(LoggingConfigurable):

    # These first traits are valid for the lifetime of this context
    output = Instance(klass=TemporaryOutputDocument,
                      allow_none=True,
                      config=False,
                      help="current output document")

    chunk_number = Integer(0,
                           config=False,
                           allow_none=False,
                           help="current chunk number")

    def _chunk_number_changed(self, name, old, new):
        if old != new:
            self.chunk_label = None

    enabled_documents = List([],
                             config=False,
                             help="Names for enabled documents.")

    # the following are valid for the duration of a single code execution
    chunk_label = Unicode(None,
                          config=False,
                          allow_none=True,
                          help="current chunk label")
    chunk_plot_number = Integer(0,
                                config=False,
                                allow_none=False,
                                help="current plot number in this chunk")

    def _chunk_label_changed(self, name, old, new):
        if old != new:
            self.chunk_plot_number = 0

    echo = Bool(True,
                config=False,
                help="If False, knitpy will not display the code in the code "
                "chunk above it's results in the final document.")

    results = CaselessStrEnum(
        default_value="markup",
        values=["markup", "hide", "hold", "asis"],
        allow_none=False,
        config=False,
        help="If 'hide', knitpy will not display the code’s results in the "
        "final document. If 'hold', knitpy will delay displaying all  "
        "output pieces until the end of the chunk. If 'asis', "
        "knitpy will pass through results without reformatting them "
        "(useful if results return raw HTML, etc.)")

    include = Bool(True,
                   config=False,
                   help="If False, knitpy will will run the chunk but not "
                   "include the chunk in the final document.")

    comment = Unicode(
        default_value="##",
        config=False,
        allow_none=True,
        help=
        "Prefix which is added to all (text) output; None or empty string will "
        "result in no prefix")

    mode = CaselessStrEnum(default_value=None,
                           values=["inline", "block"],
                           allow_none=True,
                           config=False,
                           help="current mode: inline or "
                           "block")

    engine = Instance(klass=BaseKnitpyEngine,
                      allow_none=True,
                      config=False,
                      help="current engine")

    def __init__(self, output, **kwargs):
        super(ExecutionContext, self).__init__(**kwargs)
        self.output = output
        output.context = self

    def execution_started(self):
        self.chunk_number += 1

    def execution_finished(self):
        self.output.flush()
        reset_needed = [
            "engine", "mode", "chunk_label", "comment", "include", "echo",
            "results"
        ]
        for name in self.trait_names():
            if name in reset_needed:
                self.traits()[name].set_default_value(self)
Example #24
class InteractiveShellApp(Configurable):
    """A Mixin for applications that start InteractiveShell instances.
    
    Provides configurables for loading extensions and executing files
    as part of configuring a Shell environment.

    The following methods should be called by the :meth:`initialize` method
    of the subclass:

      - :meth:`init_path`
      - :meth:`init_shell` (to be implemented by the subclass)
      - :meth:`init_gui_pylab`
      - :meth:`init_extensions`
      - :meth:`init_code`
    """
    extensions = List(
        Unicode,
        config=True,
        help="A list of dotted module names of IPython extensions to load.")
    extra_extension = Unicode(
        '',
        config=True,
        help="dotted module name of an IPython extension to load.")

    def _extra_extension_changed(self, name, old, new):
        if new:
            # add to self.extensions
            self.extensions.append(new)

    # Extensions that are always loaded (not configurable)
    default_extensions = List(Unicode, [u'storemagic'], config=False)

    hide_initial_ns = Bool(
        True,
        config=True,
        help=
        """Should variables loaded at startup (by startup files, exec_lines, etc.)
        be hidden from tools like %who?""")

    exec_files = List(Unicode,
                      config=True,
                      help="""List of files to run at IPython startup.""")
    exec_PYTHONSTARTUP = Bool(
        True,
        config=True,
        help="""Run the file referenced by the PYTHONSTARTUP environment
        variable at IPython startup.""")
    file_to_run = Unicode('', config=True, help="""A file to be run""")

    exec_lines = List(Unicode,
                      config=True,
                      help="""lines of code to run at IPython startup.""")
    code_to_run = Unicode('',
                          config=True,
                          help="Execute the given command string.")
    module_to_run = Unicode('',
                            config=True,
                            help="Run the module as a script.")
    gui = CaselessStrEnum(
        gui_keys,
        config=True,
        help="Enable GUI event loop integration with any of {0}.".format(
            gui_keys))
    matplotlib = CaselessStrEnum(
        backend_keys,
        config=True,
        help="""Configure matplotlib for interactive use with
        the default matplotlib backend.""")
    pylab = CaselessStrEnum(
        backend_keys,
        config=True,
        help="""Pre-load matplotlib and numpy for interactive use,
        selecting a particular matplotlib backend and loop integration.
        """)
    pylab_import_all = Bool(
        True,
        config=True,
        help=
        """If true, IPython will populate the user namespace with numpy, pylab, etc.
        and an ``import *`` is done from numpy and pylab, when using pylab mode.
        
        When False, pylab mode should not import any names into the user namespace.
        """)
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')

    user_ns = Instance(dict, args=None, allow_none=True)

    def _user_ns_changed(self, name, old, new):
        if self.shell is not None:
            self.shell.user_ns = new
            self.shell.init_user_ns()

    def init_path(self):
        """Add current working directory, '', to sys.path"""
        if sys.path[0] != '':
            sys.path.insert(0, '')

    def init_shell(self):
        raise NotImplementedError("Override in subclasses")

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""
        enable = False
        shell = self.shell
        if self.pylab:
            enable = lambda key: shell.enable_pylab(
                key, import_all=self.pylab_import_all)
            key = self.pylab
        elif self.matplotlib:
            enable = shell.enable_matplotlib
            key = self.matplotlib
        elif self.gui:
            enable = shell.enable_gui
            key = self.gui

        if not enable:
            return

        try:
            r = enable(key)
        except ImportError:
            self.log.warn(
                "Eventloop or matplotlib integration failed. Is matplotlib installed?"
            )
            self.shell.showtraceback()
            return
        except Exception:
            self.log.warn("GUI event loop or pylab initialization failed")
            self.shell.showtraceback()
            return

        if isinstance(r, tuple):
            gui, backend = r[:2]
            self.log.info(
                "Enabling GUI event loop integration, "
                "eventloop=%s, matplotlib=%s", gui, backend)
            if key == "auto":
                print("Using matplotlib backend: %s" % backend)
        else:
            gui = r
            self.log.info(
                "Enabling GUI event loop integration, "
                "eventloop=%s", gui)

    def init_extensions(self):
        """Load all IPython extensions in IPythonApp.extensions.

        This uses the :meth:`ExtensionManager.load_extensions` to load all
        the extensions listed in ``self.extensions``.
        """
        try:
            self.log.debug("Loading IPython extensions...")
            extensions = self.default_extensions + self.extensions
            for ext in extensions:
                try:
                    self.log.info("Loading IPython extension: %s" % ext)
                    self.shell.extension_manager.load_extension(ext)
                except:
                    self.log.warn("Error in loading extension: %s" % ext +
                                  "\nCheck your config files in %s" %
                                  self.profile_dir.location)
                    self.shell.showtraceback()
        except:
            self.log.warn("Unknown error in loading extensions:")
            self.shell.showtraceback()

    def init_code(self):
        """run the pre-flight code, specified via exec_lines"""
        self._run_startup_files()
        self._run_exec_lines()
        self._run_exec_files()

        # Hide variables defined here from %who etc.
        if self.hide_initial_ns:
            self.shell.user_ns_hidden.update(self.shell.user_ns)

        # command-line execution (ipython -i script.py, ipython -m module)
        # should *not* be excluded from %whos
        self._run_cmd_line_code()
        self._run_module()

        # flush output, so it won't be attached to the first cell
        sys.stdout.flush()
        sys.stderr.flush()

    def _run_exec_lines(self):
        """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
        if not self.exec_lines:
            return
        try:
            self.log.debug("Running code from IPythonApp.exec_lines...")
            for line in self.exec_lines:
                try:
                    self.log.info("Running code in user namespace: %s" % line)
                    self.shell.run_cell(line, store_history=False)
                except:
                    self.log.warn("Error in executing line in user "
                                  "namespace: %s" % line)
                    self.shell.showtraceback()
        except:
            self.log.warn("Unknown error in handling IPythonApp.exec_lines:")
            self.shell.showtraceback()

    def _exec_file(self, fname, shell_futures=False):
        try:
            full_filename = filefind(fname, [u'.', self.ipython_dir])
        except IOError as e:
            self.log.warn("File not found: %r" % fname)
            return
        # Make sure that the running script gets a proper sys.argv as if it
        # were run from a system shell.
        save_argv = sys.argv
        sys.argv = [full_filename] + self.extra_args[1:]
        # protect sys.argv from potential unicode strings on Python 2:
        if not py3compat.PY3:
            sys.argv = [py3compat.cast_bytes(a) for a in sys.argv]
        try:
            if os.path.isfile(full_filename):
                self.log.info("Running file in user namespace: %s" %
                              full_filename)
                # Ensure that __file__ is always defined to match Python
                # behavior.
                with preserve_keys(self.shell.user_ns, '__file__'):
                    self.shell.user_ns['__file__'] = fname
                    if full_filename.endswith('.ipy'):
                        self.shell.safe_execfile_ipy(
                            full_filename, shell_futures=shell_futures)
                    else:
                        # default to python, even without extension
                        self.shell.safe_execfile(full_filename,
                                                 self.shell.user_ns,
                                                 shell_futures=shell_futures)
        finally:
            sys.argv = save_argv

    def _run_startup_files(self):
        """Run files from profile startup directory"""
        startup_dir = self.profile_dir.startup_dir
        startup_files = []

        if self.exec_PYTHONSTARTUP and os.environ.get('PYTHONSTARTUP', False) and \
                not (self.file_to_run or self.code_to_run or self.module_to_run):
            python_startup = os.environ['PYTHONSTARTUP']
            self.log.debug("Running PYTHONSTARTUP file %s...", python_startup)
            try:
                self._exec_file(python_startup)
            except:
                self.log.warn(
                    "Unknown error in handling PYTHONSTARTUP file %s:",
                    python_startup)
                self.shell.showtraceback()
            finally:
                # Many PYTHONSTARTUP files set up the readline completions,
                # but this is often at odds with IPython's own completions.
                # Do not allow PYTHONSTARTUP to set up readline.
                if self.shell.has_readline:
                    self.shell.set_readline_completer()

        startup_files += glob.glob(os.path.join(startup_dir, '*.py'))
        startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
        if not startup_files:
            return

        self.log.debug("Running startup files from %s...", startup_dir)
        try:
            for fname in sorted(startup_files):
                self._exec_file(fname)
        except:
            self.log.warn("Unknown error in handling startup files:")
            self.shell.showtraceback()

    def _run_exec_files(self):
        """Run files from IPythonApp.exec_files"""
        if not self.exec_files:
            return

        self.log.debug("Running files in IPythonApp.exec_files...")
        try:
            for fname in self.exec_files:
                self._exec_file(fname)
        except:
            self.log.warn("Unknown error in handling IPythonApp.exec_files:")
            self.shell.showtraceback()

    def _run_cmd_line_code(self):
        """Run code or file specified at the command-line"""
        if self.code_to_run:
            line = self.code_to_run
            try:
                self.log.info("Running code given at command line (c=): %s" %
                              line)
                self.shell.run_cell(line, store_history=False)
            except:
                self.log.warn("Error in executing line in user namespace: %s" %
                              line)
                self.shell.showtraceback()

        # Like Python itself, ignore the second if the first of these is present
        elif self.file_to_run:
            fname = self.file_to_run
            try:
                self._exec_file(fname, shell_futures=True)
            except:
                self.log.warn("Error in executing file in user namespace: %s" %
                              fname)
                self.shell.showtraceback()

    def _run_module(self):
        """Run module specified at the command-line."""
        if self.module_to_run:
            # Make sure that the module gets a proper sys.argv as if it were
            # run using `python -m`.
            save_argv = sys.argv
            sys.argv = [sys.executable] + self.extra_args
            try:
                self.shell.safe_run_module(self.module_to_run,
                                           self.shell.user_ns)
            finally:
                sys.argv = save_argv
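# A minimal sketch of a concrete application driving this mixin in the order the
# class docstring recommends; ``MyApp`` and ``MyShell`` are illustrative names only.
class MyApp(InteractiveShellApp):
    def init_shell(self):
        # a real application would construct its InteractiveShell subclass here
        self.shell = MyShell.instance(parent=self, user_ns=self.user_ns)

    def initialize(self, argv=None):
        self.init_path()
        self.init_shell()
        self.init_gui_pylab()
        self.init_extensions()
        self.init_code()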
Beispiel #25
0
class ConnectionFileMixin(Configurable):
    """Mixin for configurable classes that work with connection files"""

    # The addresses for the communication channels
    connection_file = Unicode('')
    _connection_file_written = Bool(False)

    transport = CaselessStrEnum(['tcp', 'ipc'],
                                default_value='tcp',
                                config=True)

    ip = Unicode(config=True,
                 help="""Set the kernel\'s IP address [default localhost].
        If the IP address is something other than localhost, then
        Consoles on other machines will be able to connect
        to the Kernel, so be careful!""")

    def _ip_default(self):
        if self.transport == 'ipc':
            if self.connection_file:
                return os.path.splitext(self.connection_file)[0] + '-ipc'
            else:
                return 'kernel-ipc'
        else:
            return localhost()

    def _ip_changed(self, name, old, new):
        if new == '*':
            self.ip = '0.0.0.0'

    # protected traits

    shell_port = Integer(0)
    iopub_port = Integer(0)
    stdin_port = Integer(0)
    control_port = Integer(0)
    hb_port = Integer(0)

    @property
    def ports(self):
        return [getattr(self, name) for name in port_names]

    #--------------------------------------------------------------------------
    # Connection and ipc file management
    #--------------------------------------------------------------------------

    def get_connection_info(self):
        """return the connection info as a dict"""
        return dict(
            transport=self.transport,
            ip=self.ip,
            shell_port=self.shell_port,
            iopub_port=self.iopub_port,
            stdin_port=self.stdin_port,
            hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
            key=self.session.key,
        )

    def cleanup_connection_file(self):
        """Cleanup connection file *if we wrote it*

        Will not raise if the connection file was already removed somehow.
        """
        if self._connection_file_written:
            # cleanup connection files on full shutdown of kernel we started
            self._connection_file_written = False
            try:
                os.remove(self.connection_file)
            except (IOError, OSError, AttributeError):
                pass

    def cleanup_ipc_files(self):
        """Cleanup ipc files if we wrote them."""
        if self.transport != 'ipc':
            return
        for port in self.ports:
            ipcfile = "%s-%i" % (self.ip, port)
            try:
                os.remove(ipcfile)
            except (IOError, OSError):
                pass

    def write_connection_file(self):
        """Write connection info to JSON dict in self.connection_file."""
        if self._connection_file_written:
            return

        self.connection_file, cfg = write_connection_file(
            self.connection_file,
            transport=self.transport,
            ip=self.ip,
            key=self.session.key,
            stdin_port=self.stdin_port,
            iopub_port=self.iopub_port,
            shell_port=self.shell_port,
            hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
        )
        # write_connection_file also sets default ports:
        for name in port_names:
            setattr(self, name, cfg[name])

        self._connection_file_written = True

    def load_connection_file(self):
        """Load connection info from JSON dict in self.connection_file."""
        with open(self.connection_file) as f:
            cfg = json.loads(f.read())

        self.transport = cfg.get('transport', 'tcp')
        self.ip = cfg['ip']
        for name in port_names:
            setattr(self, name, cfg[name])
        if 'key' in cfg:
            self.session.key = str_to_bytes(cfg['key'])
        if cfg.get('signature_scheme'):
            self.session.signature_scheme = cfg['signature_scheme']

    #--------------------------------------------------------------------------
    # Creating connected sockets
    #--------------------------------------------------------------------------

    def _make_url(self, channel):
        """Make a ZeroMQ URL for a given channel."""
        transport = self.transport
        ip = self.ip
        port = getattr(self, '%s_port' % channel)

        if transport == 'tcp':
            return "tcp://%s:%i" % (ip, port)
        else:
            return "%s://%s-%s" % (transport, ip, port)

    def _create_connected_socket(self, channel, identity=None):
        """Create a zmq Socket and connect it to the kernel."""
        url = self._make_url(channel)
        socket_type = channel_socket_types[channel]
        self.log.info("Connecting to: %s" % url)
        sock = self.context.socket(socket_type)
        if identity:
            sock.identity = identity
        sock.connect(url)
        return sock

    def connect_iopub(self, identity=None):
        """return zmq Socket connected to the IOPub channel"""
        sock = self._create_connected_socket('iopub', identity=identity)
        sock.setsockopt(zmq.SUBSCRIBE, b'')
        return sock

    def connect_shell(self, identity=None):
        """return zmq Socket connected to the Shell channel"""
        return self._create_connected_socket('shell', identity=identity)

    def connect_stdin(self, identity=None):
        """return zmq Socket connected to the StdIn channel"""
        return self._create_connected_socket('stdin', identity=identity)

    def connect_hb(self, identity=None):
        """return zmq Socket connected to the Heartbeat channel"""
        return self._create_connected_socket('hb', identity=identity)

    def connect_control(self, identity=None):
        """return zmq Socket connected to the Heartbeat channel"""
        return self._create_connected_socket('control', identity=identity)
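# For orientation, a connection file as load_connection_file() consumes it might
# look roughly like the JSON below; the port numbers and key are made-up values.
#
#     {
#       "transport": "tcp",
#       "ip": "127.0.0.1",
#       "shell_port": 53794,
#       "iopub_port": 53795,
#       "stdin_port": 53796,
#       "control_port": 53797,
#       "hb_port": 53798,
#       "key": "a0436f6c-1916-498b-8eb9-e81ab9368e84",
#       "signature_scheme": "hmac-sha256"
#     }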
class NbConvertApp(BaseIPythonApplication):
    """Application used to convert from notebook file type (``*.ipynb``)"""

    name = 'ipython-nbconvert'
    aliases = nbconvert_aliases
    flags = nbconvert_flags

    def _log_level_default(self):
        return logging.INFO

    def _classes_default(self):
        classes = [NbConvertBase, ProfileDir]
        for pkg in (exporters, preprocessors, writers, postprocessors):
            for name in dir(pkg):
                cls = getattr(pkg, name)
                if isinstance(cls, type) and issubclass(cls, Configurable):
                    classes.append(cls)

        return classes

    description = Unicode(
        u"""This application is used to convert notebook files (*.ipynb)
        to various other formats.

        WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""")

    output_base = Unicode('',
                          config=True,
                          help='''Overwrite base name used for output files.
            Can only be used when converting one notebook at a time.
            ''')

    use_output_suffix = Bool(
        True,
        config=True,
        help="""Whether to apply a suffix prior to the extension (only relevant
            when converting to notebook format). The suffix is determined by
            the exporter, and is usually '.nbconvert'.""")

    examples = Unicode(u"""
        The simplest way to use nbconvert is

        > ipython nbconvert mynotebook.ipynb

        which will convert mynotebook.ipynb to the default format (probably HTML).

        You can specify the export format with `--to`.
        Options include {0}

        > ipython nbconvert --to latex mynotebook.ipynb

        Both HTML and LaTeX support multiple output templates. LaTeX includes
        'base', 'article' and 'report'.  HTML includes 'basic' and 'full'. You
        can specify the flavor of the format used.

        > ipython nbconvert --to html --template basic mynotebook.ipynb

        You can also pipe the output to stdout, rather than a file

        > ipython nbconvert mynotebook.ipynb --stdout

        PDF is generated via latex

        > ipython nbconvert mynotebook.ipynb --to pdf

        You can get (and serve) a Reveal.js-powered slideshow

        > ipython nbconvert myslides.ipynb --to slides --post serve

        Multiple notebooks can be given at the command line in a couple of
        different ways:

        > ipython nbconvert notebook*.ipynb
        > ipython nbconvert notebook1.ipynb notebook2.ipynb

        or you can specify the notebooks list in a config file, containing::

            c.NbConvertApp.notebooks = ["my_notebook.ipynb"]

        > ipython nbconvert --config mycfg.py
        """.format(get_export_names()))

    # Writer specific variables
    writer = Instance('IPython.nbconvert.writers.base.WriterBase',
                      help="""Instance of the writer class used to write the
                      results of the conversion.""")
    writer_class = DottedObjectName('FilesWriter',
                                    config=True,
                                    help="""Writer class used to write the
                                    results of the conversion""")
    writer_aliases = {
        'fileswriter': 'IPython.nbconvert.writers.files.FilesWriter',
        'debugwriter': 'IPython.nbconvert.writers.debug.DebugWriter',
        'stdoutwriter': 'IPython.nbconvert.writers.stdout.StdoutWriter'
    }
    writer_factory = Type()

    def _writer_class_changed(self, name, old, new):
        if new.lower() in self.writer_aliases:
            new = self.writer_aliases[new.lower()]
        self.writer_factory = import_item(new)

    # Post-processor specific variables
    postprocessor = Instance(
        'IPython.nbconvert.postprocessors.base.PostProcessorBase',
        help="""Instance of the PostProcessor class used to write the
                      results of the conversion.""")

    postprocessor_class = DottedOrNone(
        config=True,
        help="""PostProcessor class used to write the
                                    results of the conversion""")
    postprocessor_aliases = {
        'serve': 'IPython.nbconvert.postprocessors.serve.ServePostProcessor'
    }
    postprocessor_factory = Type()

    def _postprocessor_class_changed(self, name, old, new):
        if new.lower() in self.postprocessor_aliases:
            new = self.postprocessor_aliases[new.lower()]
        if new:
            self.postprocessor_factory = import_item(new)

    # Other configurable variables
    export_format = CaselessStrEnum(get_export_names(),
                                    default_value="html",
                                    config=True,
                                    help="""The export format to be used.""")

    notebooks = List([],
                     config=True,
                     help="""List of notebooks to convert.
                     Wildcards are supported.
                     Filenames passed positionally will be added to the list.
                     """)

    @catch_config_error
    def initialize(self, argv=None):
        self.init_syspath()
        super(NbConvertApp, self).initialize(argv)
        self.init_notebooks()
        self.init_writer()
        self.init_postprocessor()

    def init_syspath(self):
        """
        Add the cwd to the sys.path ($PYTHONPATH)
        """
        sys.path.insert(0, os.getcwd())

    def init_notebooks(self):
        """Construct the list of notebooks.
        If notebooks are passed on the command-line,
        they override notebooks specified in config files.
        Glob each notebook to replace notebook patterns with filenames.
        """

        # Specifying notebooks on the command-line overrides (rather than adds)
        # the notebook list
        if self.extra_args:
            patterns = self.extra_args
        else:
            patterns = self.notebooks

        # Use glob to replace all the notebook patterns with filenames.
        filenames = []
        for pattern in patterns:

            # Use glob to find matching filenames.  Allow the user to convert
            # notebooks without having to type the extension.
            globbed_files = glob.glob(pattern)
            globbed_files.extend(glob.glob(pattern + '.ipynb'))
            if not globbed_files:
                self.log.warn("pattern %r matched no files", pattern)

            for filename in globbed_files:
                if filename not in filenames:
                    filenames.append(filename)
        self.notebooks = filenames

    def init_writer(self):
        """
        Initialize the writer (which is stateless)
        """
        self._writer_class_changed(None, self.writer_class, self.writer_class)
        self.writer = self.writer_factory(parent=self)
        if hasattr(self.writer,
                   'build_directory') and self.writer.build_directory != '':
            self.use_output_suffix = False

    def init_postprocessor(self):
        """
        Initialize the postprocessor (which is stateless)
        """
        self._postprocessor_class_changed(None, self.postprocessor_class,
                                          self.postprocessor_class)
        if self.postprocessor_factory:
            self.postprocessor = self.postprocessor_factory(parent=self)

    def start(self):
        """
        Run after initialization has completed
        """
        super(NbConvertApp, self).start()
        self.convert_notebooks()

    def init_single_notebook_resources(self, notebook_filename):
        """Step 1: Initialize resources

        This initializes the resources dictionary for a single notebook. This
        method should return the resources dictionary, and MUST include the
        following keys:

            - profile_dir: the location of the profile directory
            - unique_key: the notebook name
            - output_files_dir: a directory where output files (not including
              the notebook itself) should be saved

        """

        # Get a unique key for the notebook and set it in the resources object.
        basename = os.path.basename(notebook_filename)
        notebook_name = basename[:basename.rfind('.')]
        if self.output_base:
            # strip duplicate extension from output_base, to avoid basename.ext.ext
            if getattr(self.exporter, 'file_extension', False):
                base, ext = os.path.splitext(self.output_base)
                if ext == self.exporter.file_extension:
                    self.output_base = base
            notebook_name = self.output_base

        self.log.debug("Notebook name is '%s'", notebook_name)

        # first initialize the resources we want to use
        resources = {}
        resources['profile_dir'] = self.profile_dir.location
        resources['unique_key'] = notebook_name
        resources['output_files_dir'] = '%s_files' % notebook_name

        return resources

    def export_single_notebook(self, notebook_filename, resources):
        """Step 2: Export the notebook

        Exports the notebook to a particular format according to the specified
        exporter. This function returns the output and (possibly modified)
        resources from the exporter.

        """
        try:
            output, resources = self.exporter.from_filename(
                notebook_filename, resources=resources)
        except ConversionException:
            self.log.error("Error while converting '%s'",
                           notebook_filename,
                           exc_info=True)
            self.exit(1)

        return output, resources

    def write_single_notebook(self, output, resources):
        """Step 3: Write the notebook to file

        This writes output from the exporter to file using the specified writer.
        It returns the results from the writer.

        """
        if 'unique_key' not in resources:
            raise KeyError(
                "unique_key MUST be specified in the resources, but it is not")

        notebook_name = resources['unique_key']
        if self.use_output_suffix and not self.output_base:
            notebook_name += resources.get('output_suffix', '')

        write_results = self.writer.write(output,
                                          resources,
                                          notebook_name=notebook_name)
        self.log.info(write_results)
        return write_results

    def postprocess_single_notebook(self, write_results):
        """Step 4: Postprocess the notebook

        This postprocesses the notebook after it has been written, taking as an
        argument the results of writing the notebook to file. It only does
        anything if a postprocessor has been specified.

        """
        # Post-process if post processor has been defined.
        if hasattr(self, 'postprocessor') and self.postprocessor:
            self.postprocessor(write_results)

    def convert_single_notebook(self, notebook_filename):
        """Convert a single notebook. Performs the following steps:

            1. Initialize notebook resources
            2. Export the notebook to a particular format
            3. Write the exported notebook to file
            4. (Maybe) postprocess the written file

        """
        self.log.info("Converting notebook %s to %s", notebook_filename,
                      self.export_format)
        resources = self.init_single_notebook_resources(notebook_filename)
        output, resources = self.export_single_notebook(
            notebook_filename, resources)
        write_results = self.write_single_notebook(output, resources)
        self.postprocess_single_notebook(write_results)

    def convert_notebooks(self):
        """
        Convert the notebooks in the self.notebooks traitlet
        """
        # check that the output base isn't specified if there is more than
        # one notebook to convert
        if self.output_base != '' and len(self.notebooks) > 1:
            self.log.error("""
                UsageError: --output flag or `NbConvertApp.output_base` config option
                cannot be used when converting multiple notebooks.
                """)
            self.exit(1)

        self.log.info("Starting to convert_notebooks()")
        # initialize the exporter
        self.exporter = exporter_map[self.export_format](config=self.config)

        # no notebooks to convert!
        if len(self.notebooks) == 0:
            self.print_help()
            sys.exit(-1)

        # convert each notebook
        for notebook_filename in self.notebooks:
            self.convert_single_notebook(notebook_filename)
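# A rough sketch of invoking the converter programmatically; launch_instance() is
# the standard entry point of IPython applications, and the notebook name is a
# placeholder. Roughly equivalent to: ipython nbconvert --to html mynotebook.ipynb
if __name__ == '__main__':
    NbConvertApp.launch_instance(argv=['--to', 'html', 'mynotebook.ipynb'])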
Beispiel #27
0
class NbConvertApp(BaseIPythonApplication):
    """Application used to convert to and from notebook file type (*.ipynb)"""

    name = 'ipython-nbconvert'
    aliases = nbconvert_aliases
    flags = nbconvert_flags

    def _log_level_default(self):
        return logging.INFO

    def _classes_default(self):
        classes = [NbConvertBase]
        for pkg in (exporters, transformers, writers):
            for name in dir(pkg):
                cls = getattr(pkg, name)
                if isinstance(cls, type) and issubclass(cls, Configurable):
                    classes.append(cls)
        return classes

    description = Unicode(
        u"""This application is used to convert notebook files (*.ipynb)
        to various other formats.

        WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""")

    output_base = Unicode('',
                          config=True,
                          help='''Overwrite base name used for output files.
            Can only be used when converting one notebook at a time.
            ''')

    examples = Unicode(u"""
        The simplest way to use nbconvert is
        
        > ipython nbconvert mynotebook.ipynb
        
        which will convert mynotebook.ipynb to the default format (probably HTML).
        
        You can specify the export format with `--to`.
        Options include {0}
        
        > ipython nbconvert --to latex mynotebook.ipynb

        Both HTML and LaTeX support multiple output templates. LaTeX includes
        'basic', 'book', and 'article'.  HTML includes 'basic' and 'full'.  You 
        can specify the flavor of the format used.

        > ipython nbconvert --to html --template basic mynotebook.ipynb
        
        You can also pipe the output to stdout, rather than a file
        
        > ipython nbconvert mynotebook.ipynb --stdout

        A post-processor can be used to compile a PDF

        > ipython nbconvert mynotebook.ipynb --to latex --post PDF
        
        You can get (and serve) a Reveal.js-powered slideshow
        
        > ipython nbconvert myslides.ipynb --to slides --post serve
        
        Multiple notebooks can be given at the command line in a couple of 
        different ways:
  
        > ipython nbconvert notebook*.ipynb
        > ipython nbconvert notebook1.ipynb notebook2.ipynb
        
        or you can specify the notebooks list in a config file, containing::
        
            c.NbConvertApp.notebooks = ["my_notebook.ipynb"]
        
        > ipython nbconvert --config mycfg.py
        """.format(get_export_names()))

    # Writer specific variables
    writer = Instance('IPython.nbconvert.writers.base.WriterBase',
                      help="""Instance of the writer class used to write the 
                      results of the conversion.""")
    writer_class = DottedObjectName('FilesWriter',
                                    config=True,
                                    help="""Writer class used to write the 
                                    results of the conversion""")
    writer_aliases = {
        'fileswriter': 'IPython.nbconvert.writers.files.FilesWriter',
        'debugwriter': 'IPython.nbconvert.writers.debug.DebugWriter',
        'stdoutwriter': 'IPython.nbconvert.writers.stdout.StdoutWriter'
    }
    writer_factory = Type()

    def _writer_class_changed(self, name, old, new):
        if new.lower() in self.writer_aliases:
            new = self.writer_aliases[new.lower()]
        self.writer_factory = import_item(new)

    # Post-processor specific variables
    post_processor = Instance(
        'IPython.nbconvert.post_processors.base.PostProcessorBase',
        help="""Instance of the PostProcessor class used to write the 
                      results of the conversion.""")

    post_processor_class = DottedOrNone(
        config=True,
        help="""PostProcessor class used to write the 
                                    results of the conversion""")
    post_processor_aliases = {
        'pdf': 'IPython.nbconvert.post_processors.pdf.PDFPostProcessor',
        'serve': 'IPython.nbconvert.post_processors.serve.ServePostProcessor'
    }
    post_processor_factory = Type()

    def _post_processor_class_changed(self, name, old, new):
        if new.lower() in self.post_processor_aliases:
            new = self.post_processor_aliases[new.lower()]
        if new:
            self.post_processor_factory = import_item(new)

    # Other configurable variables
    export_format = CaselessStrEnum(get_export_names(),
                                    default_value="html",
                                    config=True,
                                    help="""The export format to be used.""")

    notebooks = List([],
                     config=True,
                     help="""List of notebooks to convert.
                     Wildcards are supported.
                     Filenames passed positionally will be added to the list.
                     """)

    @catch_config_error
    def initialize(self, argv=None):
        super(NbConvertApp, self).initialize(argv)
        self.init_syspath()
        self.init_notebooks()
        self.init_writer()
        self.init_post_processor()

    def init_syspath(self):
        """
        Add the cwd to the sys.path ($PYTHONPATH)
        """
        sys.path.insert(0, os.getcwd())

    def init_notebooks(self):
        """Construct the list of notebooks.
        If notebooks are passed on the command-line,
        they override notebooks specified in config files.
        Glob each notebook to replace notebook patterns with filenames.
        """

        # Specifying notebooks on the command-line overrides (rather than adds)
        # the notebook list
        if self.extra_args:
            patterns = self.extra_args
        else:
            patterns = self.notebooks

        # Use glob to replace all the notebook patterns with filenames.
        filenames = []
        for pattern in patterns:

            # Use glob to find matching filenames.  Allow the user to convert
            # notebooks without having to type the extension.
            globbed_files = glob.glob(pattern)
            globbed_files.extend(glob.glob(pattern + '.ipynb'))
            if not globbed_files:
                self.log.warn("pattern %r matched no files", pattern)

            for filename in globbed_files:
                if filename not in filenames:
                    filenames.append(filename)
        self.notebooks = filenames

    def init_writer(self):
        """
        Initialize the writer (which is stateless)
        """
        self._writer_class_changed(None, self.writer_class, self.writer_class)
        self.writer = self.writer_factory(parent=self)

    def init_post_processor(self):
        """
        Initialize the post_processor (which is stateless)
        """
        self._post_processor_class_changed(None, self.post_processor_class,
                                           self.post_processor_class)
        if self.post_processor_factory:
            self.post_processor = self.post_processor_factory(parent=self)

    def start(self):
        """
        Run after initialization has completed
        """
        super(NbConvertApp, self).start()
        self.convert_notebooks()

    def convert_notebooks(self):
        """
        Convert the notebooks in the self.notebooks traitlet
        """
        # Export each notebook
        conversion_success = 0

        if self.output_base != '' and len(self.notebooks) > 1:
            self.log.error(
                """UsageError: --output flag or `NbConvertApp.output_base` config option
            cannot be used when converting multiple notebooks.
            """)
            self.exit(1)

        exporter = exporter_map[self.export_format](config=self.config)

        for notebook_filename in self.notebooks:
            self.log.info("Converting notebook %s to %s", notebook_filename,
                          self.export_format)

            # Get a unique key for the notebook and set it in the resources object.
            basename = os.path.basename(notebook_filename)
            notebook_name = basename[:basename.rfind('.')]
            if self.output_base:
                notebook_name = self.output_base
            resources = {}
            resources['unique_key'] = notebook_name
            resources['output_files_dir'] = '%s_files' % notebook_name
            self.log.info("Support files will be in %s",
                          os.path.join(resources['output_files_dir'], ''))

            # Try to export
            try:
                output, resources = exporter.from_filename(notebook_filename,
                                                           resources=resources)
            except ConversionException as e:
                self.log.error("Error while converting '%s'",
                               notebook_filename,
                               exc_info=True)
                self.exit(1)
            else:
                write_results = self.writer.write(output,
                                                  resources,
                                                  notebook_name=notebook_name)

                # Post-process if a post processor has been defined.
                if hasattr(self, 'post_processor') and self.post_processor:
                    self.post_processor(write_results)
                conversion_success += 1

        # If nothing was converted successfully, help the user.
        if conversion_success == 0:
            self.print_help()
            sys.exit(-1)
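# A sketch of a config file (e.g. mycfg.py, as in the examples string above) that
# drives the traits defined here; the notebook name is a placeholder. The 'serve'
# post-processor alias is resolved by _post_processor_class_changed.
c = get_config()
c.NbConvertApp.notebooks = ["my_slides.ipynb"]
c.NbConvertApp.export_format = "slides"
c.NbConvertApp.post_processor_class = "serve"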
Beispiel #28
0
class IPythonConsoleApp(Configurable):
    name = 'ipython-console-mixin'
    default_config_file_name='ipython_config.py'

    description = """
        The IPython Mixin Console.
        
        This class contains the common portions of console client (QtConsole,
        ZMQ-based terminal console, etc).  It is not a full console, in that
        launched terminal subprocesses will not be able to accept input.
        
        The Console using this mixin supports various extra features beyond
        the single-process Terminal IPython shell, such as connecting to an
        existing kernel, via:

            ipython <appname> --existing

        as well as tunneling via SSH
        
    """

    classes = classes
    flags = Dict(flags)
    aliases = Dict(aliases)
    kernel_manager_class = BlockingKernelManager

    kernel_argv = List(Unicode)
    # frontend flags&aliases to be stripped when building kernel_argv
    frontend_flags = Any(app_flags)
    frontend_aliases = Any(app_aliases)

    # create requested profiles by default, if they don't exist:
    auto_create = CBool(True)
    # connection info:
    
    transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
    
    ip = Unicode(config=True,
        help="""Set the kernel\'s IP address [default localhost].
        If the IP address is something other than localhost, then
        Consoles on other machines will be able to connect
        to the Kernel, so be careful!"""
    )
    def _ip_default(self):
        if self.transport == 'tcp':
            return LOCALHOST
        else:
            # this can fire early if ip is given,
            # in which case our return value is meaningless
            if not hasattr(self, 'profile_dir'):
                return ''
            ipcdir = os.path.join(self.profile_dir.security_dir, 'kernel-%s' % os.getpid())
            os.makedirs(ipcdir)
            atexit.register(lambda : shutil.rmtree(ipcdir))
            return os.path.join(ipcdir, 'ipc')
    
    sshserver = Unicode('', config=True,
        help="""The SSH server to use to connect to the kernel.""")
    sshkey = Unicode('', config=True,
        help="""Path to the ssh key to use for logging in to the ssh server.""")
    
    hb_port = Int(0, config=True,
        help="set the heartbeat port [default: random]")
    shell_port = Int(0, config=True,
        help="set the shell (ROUTER) port [default: random]")
    iopub_port = Int(0, config=True,
        help="set the iopub (PUB) port [default: random]")
    stdin_port = Int(0, config=True,
        help="set the stdin (DEALER) port [default: random]")
    connection_file = Unicode('', config=True,
        help="""JSON file in which to store connection info [default: kernel-<pid>.json]

        This file will contain the IP, ports, and authentication key needed to connect
        clients to this kernel. By default, this file will be created in the security-dir
        of the current profile, but can be specified by absolute path.
        """)
    def _connection_file_default(self):
        return 'kernel-%i.json' % os.getpid()

    existing = CUnicode('', config=True,
        help="""Connect to an already running kernel""")

    confirm_exit = CBool(True, config=True,
        help="""
        Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
        to force a direct exit without any confirmation.""",
    )


    def build_kernel_argv(self, argv=None):
        """build argv to be passed to kernel subprocess"""
        if argv is None:
            argv = sys.argv[1:]
        self.kernel_argv = swallow_argv(argv, self.frontend_aliases, self.frontend_flags)
        # kernel should inherit default config file from frontend
        self.kernel_argv.append("--KernelApp.parent_appname='%s'"%self.name)
    
    def init_connection_file(self):
        """find the connection file, and load the info if found.
        
        The current working directory and the current profile's security
        directory will be searched for the file if it is not given by
        absolute path.
        
        When attempting to connect to an existing kernel and the `--existing`
        argument does not match an existing file, it will be interpreted as a
        fileglob, and the matching file in the current profile's security dir
        with the latest access time will be used.
        
        After this method is called, self.connection_file contains the *full path*
        to the connection file, never just its name.
        """
        if self.existing:
            try:
                cf = find_connection_file(self.existing)
            except Exception:
                self.log.critical("Could not find existing kernel connection file %s", self.existing)
                self.exit(1)
            self.log.info("Connecting to existing kernel: %s" % cf)
            self.connection_file = cf
        else:
            # not existing, check if we are going to write the file
            # and ensure that self.connection_file is a full path, not just the shortname
            try:
                cf = find_connection_file(self.connection_file)
            except Exception:
                # file might not exist
                if self.connection_file == os.path.basename(self.connection_file):
                    # just shortname, put it in security dir
                    cf = os.path.join(self.profile_dir.security_dir, self.connection_file)
                else:
                    cf = self.connection_file
                self.connection_file = cf
        
        # should load_connection_file only be used for existing?
        # as it is now, this allows reusing ports if an existing
        # file is requested
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
            self.exit(1)
    
    def load_connection_file(self):
        """load ip/port/hmac config from JSON connection file"""
        # this is identical to KernelApp.load_connection_file
        # perhaps it can be centralized somewhere?
        try:
            fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
        except IOError:
            self.log.debug("Connection File not found: %s", self.connection_file)
            return
        self.log.debug(u"Loading connection file %s", fname)
        with open(fname) as f:
            cfg = json.load(f)
        
        self.transport = cfg.get('transport', 'tcp')
        if 'ip' in cfg:
            self.ip = cfg['ip']
        for channel in ('hb', 'shell', 'iopub', 'stdin'):
            name = channel + '_port'
            if getattr(self, name) == 0 and name in cfg:
                # not overridden by config or cl_args
                setattr(self, name, cfg[name])
        if 'key' in cfg:
            self.config.Session.key = str_to_bytes(cfg['key'])
        
    
    def init_ssh(self):
        """set up ssh tunnels, if needed."""
        if not self.sshserver and not self.sshkey:
            return
        
        if self.transport != 'tcp':
            self.log.error("Can only use ssh tunnels with TCP sockets, not %s", self.transport)
            return
        
        if self.sshkey and not self.sshserver:
            # specifying just the key implies that we are connecting directly
            self.sshserver = self.ip
            self.ip = LOCALHOST
        
        # build connection dict for tunnels:
        info = dict(ip=self.ip,
                    shell_port=self.shell_port,
                    iopub_port=self.iopub_port,
                    stdin_port=self.stdin_port,
                    hb_port=self.hb_port
        )
        
        self.log.info("Forwarding connections to %s via %s"%(self.ip, self.sshserver))
        
        # tunnels return a new set of ports, which will be on localhost:
        self.ip = LOCALHOST
        try:
            newports = tunnel_to_kernel(info, self.sshserver, self.sshkey)
        except:
            # even catch KeyboardInterrupt
            self.log.error("Could not setup tunnels", exc_info=True)
            self.exit(1)
        
        self.shell_port, self.iopub_port, self.stdin_port, self.hb_port = newports
        
        cf = self.connection_file
        base,ext = os.path.splitext(cf)
        base = os.path.basename(base)
        self.connection_file = os.path.basename(base)+'-ssh'+ext
        self.log.critical("To connect another client via this tunnel, use:")
        self.log.critical("--existing %s" % self.connection_file)
    
    def _new_connection_file(self):
        cf = ''
        while not cf:
            # we don't need a 128b id to distinguish kernels, use more readable
            # 48b node segment (12 hex chars).  Users running more than 32k simultaneous
            # kernels can subclass.
            ident = str(uuid.uuid4()).split('-')[-1]
            cf = os.path.join(self.profile_dir.security_dir, 'kernel-%s.json' % ident)
            # only keep if it's actually new.  Protect against unlikely collision
            # in 48b random search space
            cf = cf if not os.path.exists(cf) else ''
        return cf

    def init_kernel_manager(self):
        # Don't let Qt or ZMQ swallow KeyboardInterrupts.
        signal.signal(signal.SIGINT, signal.SIG_DFL)

        # Create a KernelManager and start a kernel.
        self.kernel_manager = self.kernel_manager_class(
                                transport=self.transport,
                                ip=self.ip,
                                shell_port=self.shell_port,
                                iopub_port=self.iopub_port,
                                stdin_port=self.stdin_port,
                                hb_port=self.hb_port,
                                connection_file=self.connection_file,
                                config=self.config,
        )
        # start the kernel
        if not self.existing:
            self.kernel_manager.start_kernel(extra_arguments=self.kernel_argv)
        elif self.sshserver:
            # ssh, write new connection file
            self.kernel_manager.write_connection_file()
        atexit.register(self.kernel_manager.cleanup_connection_file)
        self.kernel_manager.start_channels()


    def initialize(self, argv=None):
        """
        Classes which mix this class in should call:
               IPythonConsoleApp.initialize(self,argv)
        """
        self.init_connection_file()
        default_secure(self.config)
        self.init_ssh()
        self.init_kernel_manager()
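# A minimal sketch of a frontend mixing this class into an IPython application;
# ``MyConsoleApp`` is an illustrative name, not part of IPython.
class MyConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
    def initialize(self, argv=None):
        super(MyConsoleApp, self).initialize(argv)
        self.build_kernel_argv(self.extra_args)
        # connection file, SSH tunnels and kernel manager, in the mixin's order
        IPythonConsoleApp.initialize(self, argv)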
Beispiel #29
0
class InteractiveShellApp(Configurable):
    """A Mixin for applications that start InteractiveShell instances.
    
    Provides configurables for loading extensions and executing files
    as part of configuring a Shell environment.

    The following methods should be called by the :meth:`initialize` method
    of the subclass:

      - :meth:`init_path`
      - :meth:`init_shell` (to be implemented by the subclass)
      - :meth:`init_gui_pylab`
      - :meth:`init_extensions`
      - :meth:`init_code`
    """
    extensions = List(
        Unicode,
        config=True,
        help="A list of dotted module names of IPython extensions to load.")
    extra_extension = Unicode(
        '',
        config=True,
        help="dotted module name of an IPython extension to load.")

    def _extra_extension_changed(self, name, old, new):
        if new:
            # add to self.extensions
            self.extensions.append(new)

    # Extensions that are always loaded (not configurable)
    default_extensions = List(Unicode, [u'storemagic'], config=False)

    exec_files = List(Unicode,
                      config=True,
                      help="""List of files to run at IPython startup.""")
    file_to_run = Unicode('', config=True, help="""A file to be run""")

    exec_lines = List(Unicode,
                      config=True,
                      help="""lines of code to run at IPython startup.""")
    code_to_run = Unicode('',
                          config=True,
                          help="Execute the given command string.")
    module_to_run = Unicode('',
                            config=True,
                            help="Run the module as a script.")
    gui = CaselessStrEnum(
        ('qt', 'wx', 'gtk', 'glut', 'pyglet', 'osx'),
        config=True,
        help=
        "Enable GUI event loop integration ('qt', 'wx', 'gtk', 'glut', 'pyglet', 'osx')."
    )
    pylab = CaselessStrEnum(
        ['tk', 'qt', 'wx', 'gtk', 'osx', 'inline', 'auto'],
        config=True,
        help="""Pre-load matplotlib and numpy for interactive use,
        selecting a particular matplotlib backend and loop integration.
        """)
    pylab_import_all = Bool(
        True,
        config=True,
        help="""If true, an 'import *' is done from numpy and pylab,
        when using pylab""")
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')

    def init_path(self):
        """Add current working directory, '', to sys.path"""
        if sys.path[0] != '':
            sys.path.insert(0, '')

    def init_shell(self):
        raise NotImplementedError("Override in subclasses")

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""
        if self.gui or self.pylab:
            shell = self.shell
            try:
                if self.pylab:
                    gui, backend = pylabtools.find_gui_and_backend(self.pylab)
                    self.log.info("Enabling GUI event loop integration, "
                                  "toolkit=%s, pylab=%s" % (gui, self.pylab))
                    shell.enable_pylab(gui,
                                       import_all=self.pylab_import_all,
                                       welcome_message=True)
                else:
                    self.log.info("Enabling GUI event loop integration, "
                                  "toolkit=%s" % self.gui)
                    shell.enable_gui(self.gui)
            except Exception:
                self.log.warn("GUI event loop or pylab initialization failed")
                self.shell.showtraceback()

    def init_extensions(self):
        """Load all IPython extensions in IPythonApp.extensions.

        This uses the :meth:`ExtensionManager.load_extensions` to load all
        the extensions listed in ``self.extensions``.
        """
        try:
            self.log.debug("Loading IPython extensions...")
            extensions = self.default_extensions + self.extensions
            for ext in extensions:
                try:
                    self.log.info("Loading IPython extension: %s" % ext)
                    self.shell.extension_manager.load_extension(ext)
                except:
                    self.log.warn("Error in loading extension: %s" % ext +
                                  "\nCheck your config files in %s" %
                                  self.profile_dir.location)
                    self.shell.showtraceback()
        except:
            self.log.warn("Unknown error in loading extensions:")
            self.shell.showtraceback()

    def init_code(self):
        """run the pre-flight code, specified via exec_lines"""
        self._run_startup_files()
        self._run_exec_lines()
        self._run_exec_files()
        self._run_cmd_line_code()
        self._run_module()

        # flush output, so it won't be attached to the first cell
        sys.stdout.flush()
        sys.stderr.flush()

        # Hide variables defined here from %who etc.
        self.shell.user_ns_hidden.update(self.shell.user_ns)

    def _run_exec_lines(self):
        """Run lines of code in IPythonApp.exec_lines in the user's namespace."""
        if not self.exec_lines:
            return
        try:
            self.log.debug("Running code from IPythonApp.exec_lines...")
            for line in self.exec_lines:
                try:
                    self.log.info("Running code in user namespace: %s" % line)
                    self.shell.run_cell(line, store_history=False)
                except:
                    self.log.warn("Error in executing line in user "
                                  "namespace: %s" % line)
                    self.shell.showtraceback()
        except:
            self.log.warn("Unknown error in handling IPythonApp.exec_lines:")
            self.shell.showtraceback()

    def _exec_file(self, fname):
        try:
            full_filename = filefind(fname, [u'.', self.ipython_dir])
        except IOError as e:
            self.log.warn("File not found: %r" % fname)
            return
        # Make sure that the running script gets a proper sys.argv as if it
        # were run from a system shell.
        save_argv = sys.argv
        sys.argv = [full_filename] + self.extra_args[1:]
        # protect sys.argv from potential unicode strings on Python 2:
        if not py3compat.PY3:
            sys.argv = [py3compat.cast_bytes(a) for a in sys.argv]
        try:
            if os.path.isfile(full_filename):
                if full_filename.endswith('.ipy'):
                    self.log.info("Running file in user namespace: %s" %
                                  full_filename)
                    self.shell.safe_execfile_ipy(full_filename)
                else:
                    # default to python, even without extension
                    self.log.info("Running file in user namespace: %s" %
                                  full_filename)
                    # Ensure that __file__ is always defined to match Python behavior
                    self.shell.user_ns['__file__'] = fname
                    try:
                        self.shell.safe_execfile(full_filename,
                                                 self.shell.user_ns)
                    finally:
                        del self.shell.user_ns['__file__']
        finally:
            sys.argv = save_argv

    def _run_startup_files(self):
        """Run files from profile startup directory"""
        startup_dir = self.profile_dir.startup_dir
        startup_files = glob.glob(os.path.join(startup_dir, '*.py'))
        startup_files += glob.glob(os.path.join(startup_dir, '*.ipy'))
        if not startup_files:
            return

        self.log.debug("Running startup files from %s...", startup_dir)
        try:
            for fname in sorted(startup_files):
                self._exec_file(fname)
        except:
            self.log.warn("Unknown error in handling startup files:")
            self.shell.showtraceback()

    def _run_exec_files(self):
        """Run files from IPythonApp.exec_files"""
        if not self.exec_files:
            return

        self.log.debug("Running files in IPythonApp.exec_files...")
        try:
            for fname in self.exec_files:
                self._exec_file(fname)
        except:
            self.log.warn("Unknown error in handling IPythonApp.exec_files:")
            self.shell.showtraceback()

    def _run_cmd_line_code(self):
        """Run code or file specified at the command-line"""
        if self.code_to_run:
            line = self.code_to_run
            try:
                self.log.info("Running code given at command line (c=): %s" %
                              line)
                self.shell.run_cell(line, store_history=False)
            except:
                self.log.warn("Error in executing line in user namespace: %s" %
                              line)
                self.shell.showtraceback()

        # Like Python itself, ignore the second if the first of these is present
        elif self.file_to_run:
            fname = self.file_to_run
            try:
                self._exec_file(fname)
            except:
                self.log.warn("Error in executing file in user namespace: %s" %
                              fname)
                self.shell.showtraceback()

    def _run_module(self):
        """Run module specified at the command-line."""
        if self.module_to_run:
            # Make sure that the module gets a proper sys.argv as if it were
            # run using `python -m`.
            save_argv = sys.argv
            sys.argv = [sys.executable] + self.extra_args
            try:
                self.shell.safe_run_module(self.module_to_run,
                                           self.shell.user_ns)
            finally:
                sys.argv = save_argv
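The exec_lines / exec_files / extensions hooks above are normally driven from a profile configuration file rather than called directly. A minimal sketch, assuming the class is IPython's InteractiveShellApp mixin (file name and values are illustrative, not from the source):

# ipython_config.py inside an IPython profile -- illustrative values only
c = get_config()

# extensions loaded by init_extensions()
c.InteractiveShellApp.extensions = ["autoreload"]

# lines run by _run_exec_lines(), in order, after the profile startup files
c.InteractiveShellApp.exec_lines = ["%autoreload 2", "import numpy as np"]

# files run by _run_exec_files()
c.InteractiveShellApp.exec_files = ["setup_plotting.py"]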
Example #30
0
class Knitpy(LoggingConfigurable):
    """Engine used to convert from python markdown (``*.pymd``) to html/latex/..."""
    keep_md = Bool(False,
                   config=True,
                   help="""Whether to keep the temporary md files""")

    log_to_file = Bool(False,
                       config=True,
                       help="""Whether to send the log to a file""")

    extra_document_configs = List(
        default_value=[],
        config=True,
        help="Additional configurations for FinalOutputDocuments")

    default_export_format = CaselessStrEnum(
        VALID_OUTPUT_FORMAT_NAMES,
        default_value=DEFAULT_OUTPUT_FORMAT_NAME,
        config=True,
        help=
        """The export format to be used (can't by from extra_document_configs!)."""
    )

    kernel_debug = Bool(
        False,
        config=True,
        help="""Whether to output kernel messages to the (debug) log""")

    timeout = Integer(10,
                      config=True,
                      help="timeout for individual code executions")

    # Things for the parser...
    chunk_begin = CRegExpMultiline(
        r'^\s*```+\s*{[.]?(?P<engine>[a-z]+)\s*(?P<args>.*)}\s*$',
        config=True,
        help="chunk begin regex (must include the named "
        "group 'engine' and 'args'")
    chunk_end = CRegExpMultiline(r'^\s*```+\s*$',
                                 config=True,
                                 help="chunk end regex")
    inline_code = CRegExpMultiline(
        r'`(?P<engine>[a-z]+) +([^`]+)\s*`',
        config=True,
        help="inline code regex (must include a named group 'engine')")
    comment_line = CRegExp(r'^\s*#', config=True, help="comment line regex")
    yaml_separator = CRegExpMultiline(r"^---\s*$",
                                      config=True,
                                      help="separator for the yaml metadata")

    def __init__(self, **kwargs):
        super(Knitpy, self).__init__(**kwargs)
        self.init_kernel_manager()
        self.init_engines()
        self.init_output_configurations()

    def init_kernel_manager(self):
        self._km = MultiKernelManager(log=self.log, parent=self)
        self._ksm = KernelSpecManager(log=self.log, parent=self)
        self._kernels = {}
        #ksm.find_kernel_specs()

    def init_engines(self):
        self._engines = {}
        self._engines["python"] = PythonKnitpyEngine(parent=self)
        # TODO: check that every kernel_name is in ksm.find_kernel_specs()

    def init_output_configurations(self):
        self._outputs = {}
        for config in DEFAULT_FINAL_OUTPUT_FORMATS:
            fod = FinalOutputConfiguration(parent=self, **config)
            self._outputs[config["name"]] = fod
            self._outputs[config["alias"]] = fod
        for config in self.extra_document_configs:
            fod = FinalOutputConfiguration(parent=self, **config)
            self._outputs[config["name"]] = fod
            self._outputs[config["alias"]] = fod

    def parse_document(self, input):
        if os.path.exists(input):
            filename = input
            f = codecs.open(filename, 'r', 'UTF-8')
            doc = f.read()
        else:
            doc = input
            filename = "anonymous_input"

        # the yaml can stay in the doc, pandoc will remove '---' blocks
        # pandoc will also do its own interpretation and use title/author and so on...
        # ToDo: not sure if that should stay or if we should start with clean metadata
        # title, author, date
        # title: "A first try"
        # author: "Jan Schulz"
        # date: "Monday, February 23, 2015"
        # default values
        metadata = {
            "title": filename,
            "author": getpass.getuser(),
            "date": datetime.datetime.now().strftime("%A, %B %d, %Y")
        }

        pos = 0
        start = self.yaml_separator.search(doc, pos)
        if start is not None:
            end = self.yaml_separator.search(doc, start.end())
            if end is None:
                raise ParseException("Found no metadata end separator.")
            try:
                res = yaml.load(doc[start.end():end.start()])
                self.log.debug("Metadata: %s", res)
                metadata.update(res)
            except Exception as e:
                raise ParseException("Malformed metadata: %s" % str(e))

        parsed_doc = self._parse_blocks(doc)
        return parsed_doc, metadata

    def _parse_blocks(self, doc):
        result = []
        doc_pos = 0
        blocks = self.chunk_begin.finditer(doc)
        for block_start in blocks:
            # process the text before the match
            text = doc[doc_pos:block_start.start()]
            self._parse_inline(text, result)
            # TODO: somehow an empty line before a codeblock vanishes, so add one here
            result.append((TTEXT, "\n"))
            # now the block itself
            # find the end of the block
            block_end = self.chunk_end.search(doc, block_start.end())
            if block_end is None:
                raise ParseException(
                    "Found no end for the block starting at pos %s" %
                    block_start.end())
            result.append((TBLOCK, (doc[block_start.end():block_end.start()],
                                    block_start.groupdict())))
            doc_pos = block_end.end()
        # text after the last block
        self._parse_inline(doc[doc_pos:], result)
        return result

    def _parse_inline(self, text, result):
        text_pos = 0
        for inline in self.inline_code.finditer(text):
            # text before inline code
            result.append((TTEXT, text[text_pos:inline.start()]))
            # inline code
            engine_offset = len(inline.group('engine')) + 1
            result.append(
                (TINLINE,
                 (text[inline.start() + engine_offset + 1:inline.end() - 1],
                  inline.groupdict())))
            text_pos = inline.end()
        # text after the last inline code
        result.append((TTEXT, text[text_pos:]))

    def _all_lines_comments(self, lines):
        for line in lines.split("\n"):
            if not self.comment_line.match(line):
                return False
        return True

    def convert(self, parsed, output):

        context = ExecutionContext(output=output)

        for entry in parsed:
            if entry[0] == TBLOCK:
                context.mode = "block"
                self._process_code(entry[1], context=context)
            elif entry[0] == TINLINE:
                context.mode = "inline"
                self._process_code(entry[1], context=context)
            elif entry[0] == TTEXT:
                output.add_text(entry[1])
            else:
                raise ParseException("Found something unexpected: %s" % entry)
        # process_code opened kernels, so close them here
        self._km.shutdown_all()
        # workaround for https://github.com/ipython/ipython/issues/8007
        # FIXME: remove if IPython >3.0 is in require
        self._km._kernels.clear()
        self._kernels = {}
        return output

    def _process_code(self, input, context):

        context.execution_started()

        # setup the execution context
        code = input[0]
        intro = input[1]
        engine_name = intro["engine"]
        raw_args = intro.get("args", "")

        args = self._parse_args(raw_args)

        # for compatibility with knitr, where python is specified via "{r engine='python'}"
        if "engine" in args:
            engine_name = args.pop("engine")
            self.log.debug("Running on engine: %s", engine_name)

        try:
            engine = self._engines[engine_name]
        except:
            raise ParseException("Unknown codeblock type: %s" % engine_name)
        assert engine is not None, "Engine is None"
        context.engine = engine
        if engine.name not in context.enabled_documents:
            plotting_formats = context.output.export_config.accepted_image_formats
            plot_code = engine.get_plotting_format_code(plotting_formats)
            self._run_silently(context.engine.kernel, plot_code)
            context.enabled_documents.append(engine.name)
            self.log.info("Enabled image formats '%s' in engine '%s'.",
                          plotting_formats, engine.name)

        # configure the context
        if "echo" in args:
            context.echo = args.pop("echo")

        # eval=False means that we don't execute the block at all
        if "eval" in args:
            _eval = args.pop("eval")
            if _eval is False:
                if context.echo:
                    code = code.replace(os.linesep, "\n").lstrip("\n")
                    context.output.add_code(code, language=engine.language)
                return

        if "results" in args:
            context.results = args.pop("results")

        if "include" in args:
            context.include = args.pop("include")

        if "chunk_label" in args:
            context.chunk_label = args.pop("chunk_label")
        else:
            context.chunk_label = u"unnamed-chunk-%s" % context.chunk_number

        if "comment" in args:
            context.comment = args.pop("comment")

        if args:
            self.log.debug("Found unhandled args: %s", args)

        lines = ''
        code_lines = code.split('\n')
        space_re = re.compile(r'^([\s]+)')
        spaces = []

        # TODO: this whole "merge with the next line" should be rewritten as a generator
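        # For illustration (an assumed chunk, not from the source): given
        #     for i in range(2):
        #         # a comment inside the loop
        #         print(i)
        #     print("done")
        # the loop header is held back because is_complete reports it as incomplete,
        # the indented lines (including the comment) are merged in via the
        # spaces/loop_continues check, and the whole block is executed at once;
        # print("done") then starts a new block.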
        def loop_continues(line_no):
            if len(code_lines) <= line_no:
                return False

            candidate = code_lines[line_no]
            # comments should be swallowed if a line further down has code in it with the
            # right number of spaces in front
            while candidate.strip() == "" or self._all_lines_comments(
                    candidate):
                line_no += 1
                if len(code_lines) <= line_no:
                    return False
                candidate = code_lines[line_no]
            # the next code line must have either the same number of spaces (still in a loop),
            # fewer spaces than in 'spaces' (nested loop) or none (end of loop). If more spaces
            # or a different kind of whitespace is found, this will result in an error which
            # will be shown when the code is executed...
            while spaces:
                possible_space = spaces[-1]
                if candidate[:len(possible_space)] == possible_space:
                    # ok, we are at the "right" level of space
                    return True
                # not our "space", so remove it and try the one one nesting above
                spaces.pop()
            return False

        for line_no in range(len(code_lines)):
            cur_line = code_lines[line_no]
            lines = lines + cur_line
            # check if we are in a loop and if so, if the next line also belongs to this loop
            # this only catches the case where we are *in* a loop and not the loop start (the line
            #  with a ':' in it). That line is caught by the is_complete call below. Nested loops
            #  are also caught due to the spaces in front of them.
            m = space_re.match(cur_line)
            if m:
                cur_space = m.group(0)
                spaces.append(cur_space)
                if loop_continues(line_no + 1):
                    lines += "\n"
                    continue

            if spaces:
                # we are in a loop, as spaces has some spaces in it, but the code above didn't find
                # any spaces in front of the line -> this is the case when loop_continues found a
                # new codeline from this loop after a comment with different spaces in front of
                # it or an empty line. This could be such an empty/comment line and we have to
                # look at the next line as well!
                if cur_line.strip() == "" or self._all_lines_comments(
                        cur_line):
                    lines += "\n"
                    continue
            # we have a block of code, including all lines of a loop
            msg = engine.kernel.is_complete(lines + "\n\n")
            reply = engine.kernel.get_shell_msg(timeout=self.timeout)
            assert reply['msg_type'] == 'is_complete_reply', str(reply)
            if self.kernel_debug:
                self.log.debug("completion_request: %s", msg)
            if reply['content']['status'] == 'complete':
                if lines.strip() == "":
                    # No requests for "no code"
                    lines = ""
                    continue
                elif self._all_lines_comments(lines):
                    # comments should go to the next code block
                    lines += "\n"
                    continue
                # run the lines
                self._run_lines(lines + "\n", context)
                lines = ""
            elif reply['content']['status'] == 'invalid':
                # TODO: not sure how this should be handled
                # Either abort execution of the whole file or just retry with the next line?
                # However this should be handled via a user message
                self.log.info("Code invalid:\n%s", lines)
                context.output.add_code(lines, language=engine.language)
                context.output.add_execution_error("Code invalid")
                lines = ""
            else:
                # the "incomplete" case: don't run anything wait for the next line
                lines += "\n"

        # This can only happen if the last line is incomplete
        # This will always result in an error!
        if lines.strip() != "":
            self._run_lines(lines, context)

        context.execution_finished()

    def _parse_args(self, raw_args):
        # Todo: knitr interprets all values, so code references are possible.
        # This also means that we have to do args parsing at interpretation time, so that
        # variables from other code can be taken into account...
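        # Example (assumed input, not from the source):
        #   "my-chunk, echo=True, eval=F, dpi=96"
        # is parsed into
        #   {"chunk_label": "my-chunk", "echo": True, "eval": False, "dpi": 96}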

        args = {}
        if raw_args.strip() == "":
            return args

        converter = {
            "True": True,
            "False": False,
            "None": None,
            "T": True,  # Rs True/False
            "F": False,
            "TRUE": True,
            "FALSE": False,
            # treat R's NA as None, probably overkill to look for float("NA")
            "NA": None,
            "NULL": None,
        }

        # The first is special as that can be the name of the chunk
        first = True
        for arg in raw_args.split(","):
            arg = arg.strip()
            if not "=" in arg:
                if not first:
                    raise ParseException(
                        "Malformed options for code chunk: '%s' in '%s'" %
                        (arg, raw_args))
                args["chunk_label"] = arg
                continue
            first = False
            label, value = arg.split("=")
            v = value.strip()
            # convert to real types.
            # TODO: Should be done by submitting the whole thing to the kernel, like knitr does
            # -> variables from one codecell can be used in the args of the next one ...
            if (v[0] == '"' and v[-1] == '"'):
                v = v[1:-1]
            elif (v[0] == "'" and v[-1] == "'"):
                v = v[1:-1]
            elif v in converter:
                v = converter[v]
            else:
                try:
                    v = int(v)
                except:
                    self.log.error(
                        "Could not decode option value: '%s=%s'. Discarded...",
                        label, v)
                    continue

            args[label.strip()] = v

        return args

    def _run_lines(self, lines, context):
        kernel = context.engine.kernel
        msg_id = kernel.execute(lines)
        if self.kernel_debug:
            self.log.debug("Executing lines (msg_id=%s):\n%s", msg_id, lines)
        # wait for finish, with timeout
        # At first we have to wait until the kernel tells us it is finished with running the code
        while True:
            try:
                msg = kernel.shell_channel.get_msg(timeout=self.timeout)
                if self.kernel_debug:
                    self.log.debug("shell msg: %s", msg)
            except Empty:
                # This indicates that something bad happened, as AFAIK this should return...
                self.log.error("Timeout waiting for execute reply")
                raise KnitpyException("Timeout waiting for execute reply.")
            if msg['parent_header'].get('msg_id') == msg_id:
                # It's finished, and we got our reply, so next look at the results
                break
            else:
                # not our reply
                self.log.debug(
                    "Discarding message from a different client: %s" % msg)
                continue

        # Now look at the results of our code execution and earlier completion requests
        # We handle messages until the kernel indicates it is idle again
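        # For a typical execution the iopub stream looks roughly like (per the
        # Jupyter messaging protocol): status:busy -> execute_input -> zero or
        # more stream / display_data / execute_result / error messages -> status:idle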
        status_idle_again = False
        while True:
            try:
                msg = kernel.get_iopub_msg(timeout=self.timeout)
            except Empty:
                # There should be at least some messages: we just executed code!
                # The only valid case is when the timeout fired too early (i.e. long-running
                # code in the document) -> we handle that below
                self.log.warn("Timeout waiting for expected IOPub output")
                break

            if msg['parent_header'].get('msg_id') != msg_id:
                if msg['parent_header'].get(
                        u'msg_type') != u'is_complete_request':
                    # not an output from our execution and not one of the complete_requests
                    self.log.debug(
                        "Discarding output from a different client: %s" % msg)
                else:
                    # complete_requests are ok
                    pass
                continue

            # Here we have some message which corresponds to our code execution
            msg_type = msg['msg_type']
            content = msg['content']

            # The kernel indicates some status: executing -> idle
            if msg_type == 'status':
                if content['execution_state'] == 'idle':
                    # When idle, the kernel has executed all input
                    status_idle_again = True
                    break
                else:
                    # the "starting execution" messages
                    continue
            elif msg_type == 'clear_output':
                # we don't handle that!?
                self.log.debug(
                    "Discarding unexpected 'clear_output' message: %s" % msg)
                continue
            ## So, from here on we have a message with real content
            if self.kernel_debug:
                self.log.debug("iopub msg (%s): %s", msg_type, msg)
            if context.include:
                self._handle_return_message(msg, context)

        if not status_idle_again:
            self.log.error(
                "Code lines didn't execute in time. Don't use long-running code in "
                "documents or increase the timeout!")
            self.log.error("line(s): %s" % lines)

    def _handle_return_message(self, msg, context):
        if context.mode == "inline":
            #self.log.debug("inline: %s" % msg)
            if msg["msg_type"] == "execute_result":
                context.output.add_text(_plain_text(msg["content"]))
        elif context.mode == "block":
            #self.log.debug("block: %s" % msg)
            type = msg["msg_type"]
            if type == "execute_input":
                if context.echo:
                    context.output.add_code(_code(msg[u'content']),
                                            language=context.engine.language)
            elif type == "stream":
                # {u'text': u'a\nb\nc\n', u'name': u'stdout'}
                # TODO: format stdout and stderr differently?
                txt = msg["content"].get("text", "")
                if txt.strip() == "":
                    return
                if context.results == 'markup':
                    context.output.add_output(txt)
                elif context.results == 'asis':
                    context.output.add_asis(txt)
                elif context.results == 'hide':
                    return
                else:
                    # TODO: implement a caching system... again...
                    self.log.warn(
                        "Can't handle results='hold' yet, falling back to 'markup'."
                    )
                    context.output.add_output(txt)
            elif (type == "execute_result") or (type == "display_data"):
                if context.results == 'hide':
                    return
                if context.results == 'hold':
                    self.log.warn(
                        "Can't handle results='hold' yet, falling back to 'markup'."
                    )

                # Here we handle the output from the IPython display framework.
                # 1. If an object has an _ipython_display_() method, that will be called. This
                #    method should publish (one) display_data message and return -> the content
                #    ends up in a "display_data" msg and the "execute_result" has no data
                # 2. else try the different IPython.core.formatters for the object, which
                #    basically call the right _repr_<whatever>_ method to get a formatted string
                #    in that mimetype. These are added as alternatives under content.data of the
                #    "execute_result".

                # data can hold multiple mimetype representations of the same output
                data = msg[u"content"][u'data']
                #self.log.debug(str(data))

                # handle plots
                #self.log.debug("Accepted image mimetypes: %s", context.output.export_config.accepted_image_mimetypes)
                for mime_type in context.output.export_config.accepted_image_mimetypes:
                    mime_data = data.get(mime_type, None)
                    if mime_data is None:
                        self.log.debug("No image found: %s", mime_type)
                        continue
                    try:
                        self.log.debug("Trying to include image...")
                        context.output.add_image(mime_type,
                                                 mime_data,
                                                 title="")
                    except KnitpyOutputException as e:
                        self.log.info("Couldn't include image: %s", e)
                        continue
                    return

                # now try some marked up text formats
                for mime_type in context.output.markup_mimetypes:
                    mime_data = data.get(mime_type, None)
                    if mime_data is None:
                        continue
                    try:
                        self.log.debug("Trying to include markup text...")
                        context.output.add_markup_text(mime_type, mime_data)
                    except KnitpyOutputException as e:
                        self.log.info("Couldn't include markup text: %s", e)
                        continue
                    return

                # as a last resort, try plain text...
                if u'text/plain' in data:
                    txt = data.get(u"text/plain", "")
                    if txt != "":
                        if context.results == 'markup':
                            context.output.add_output(txt)
                            if txt[-1] != "\n":
                                context.output.add_output("\n")
                        elif context.results == 'asis':
                            context.output.add_asis(txt)
                            if txt[-1] != "\n":
                                context.output.add_asis("\n")

                        return

                # If we are here,  we couldn't handle any of the more specific data types
                # and didn't find any output text
                excuse = "\n(Found data of type '{}', but couldn't handle it)\n"
                context.output.add_output(excuse.format(data.keys()))
            elif (type == "error"):
                ename = msg["content"].get("ename", "unknown exception")
                evalue = msg["content"].get("evalue",
                                            "unknown exception value")
                tb = msg["content"].get("traceback", "<not available>")
                if not is_string(tb):
                    # remove the first line...
                    tb = "\n".join(tb[1:])
                self.log.info(tb)
                # there are ANSI escape sequences in the traceback, which kill pandoc :-(
                if u"\x1b[1;32m" in tb:
                    tb = "!! traceback unavailable due to included color sequences;\n" \
                         "!! execute `%colors NoColor` once before this line to remove them!"
                context.output.add_execution_error("%s: %s" % (ename, evalue),
                                                   tb)
            else:
                self.log.debug("Ignored msg of type %s" % type)

    def _run_silently(self, kc, lines):
        try:
            msg_id = kc.execute(lines + "\n\n",
                                silent=self.kernel_debug,
                                store_history=False)
            self.log.debug("Executed silent code: %s", lines)
            reply = kc.get_shell_msg(timeout=self.timeout)
            assert reply['parent_header'].get(
                'msg_id') == msg_id, "Wrong reply! " + str(reply)
            if self.kernel_debug:
                self.log.debug("Silent code shell reply: %s", reply)
        except Empty:
            self.log.error("Code took too long:\n %s", lines)

        # now empty the iopub channel (there is at least a "starting" message)
        while True:
            try:
                msg = kc.get_iopub_msg(timeout=0.1)
                if self.kernel_debug:
                    self.log.debug("Silent code iopub msg: %s", msg)
            except Empty:
                break

    def _get_kernel(self, engine):
        kernel_name = engine.kernel_name
        kernel_startup_lines = engine.startup_lines

        if kernel_name not in self._kernels:
            self.log.info("Starting a new kernel: %s" % kernel_name)
            kernelid = self._km.start_kernel(kernel_name=kernel_name)
            #km.list_kernel_ids()
            kn = self._km.get_kernel(kernelid)
            kc = kn.client()
            self._kernels[kernel_name] = kc
            # now initialize the channels
            kc.start_channels()
            kc.wait_for_ready()
            self._run_silently(kc, kernel_startup_lines)
            self.log.info("Executed kernel startup lines for engine '%s'.",
                          engine.name)

        return self._kernels[kernel_name]

    def get_output_format(self, fmt_name, config=None):
        self._ensure_valid_output(fmt_name)
        fod = self._outputs.get(fmt_name).copy()
        # self.log.info("%s: %s", fmt_name, config)
        if not config:
            pass
        elif isinstance(config, dict):
            fod.update(**config)
        elif config == "default":
            # html_document: default
            pass
        else:
            self.log.error(
                "Unknown config for document '%s': '%s'. Ignored...", fmt_name,
                config)
        return fod

    def _knit(self, input, outputdir_name, final_format="html", config=None):
        """Internal function to aid testing"""

        parsed, metadata = self.parse_document(input)  # sets kpydoc.parsed and
        final_format = self.get_output_format(final_format, config=config)

        md_temp = TemporaryOutputDocument(fileoutputs=outputdir_name,
                                          export_config=final_format,
                                          log=self.log,
                                          parent=self)

        # get the temporary md file
        self.convert(parsed, md_temp)

        return md_temp.content

    def render(self, filename, output=None):
        """
        Convert the filename to the given output format(s)
        """
        # Export each document
        conversion_success = 0
        converted_docs = []

        # save here to change back after the conversion.
        orig_cwd = os.getcwd()
        needs_chdir = False

        # expand $HOME and so on...
        filename = expand_path(filename)
        filename = os.path.abspath(filename)
        self.log.info("Converting %s..." % filename)

        basedir = os.path.dirname(filename)
        basename = os.path.splitext(os.path.basename(filename))[0]

        # It's easier if we just change wd to the dir of the file
        if unicode_type(basedir) != py3compat.getcwd():
            os.chdir(basedir)
            needs_chdir = True
            self.log.info("Changing to working dir: %s" % basedir)
            filename = os.path.basename(filename)

        outputdir_name = os.path.splitext(basename)[0] + "_files"

        # parse the input document
        parsed, metadata = self.parse_document(filename)

        # get the output formats
        # order: kwarg overwrites default overwrites document
        output_formats = [self._outputs[self.default_export_format]]
        if output is None:
            self.log.debug("Converting to default output format [%s]!" %
                           self.default_export_format)
        elif output == "all":
            outputs = metadata.get("output", None)
            # if nothing is specified, we keep the default
            if outputs is None:
                self.log.debug(
                    "Did not find any specified output formats: using only default!"
                )
            else:
                output_formats = []
                for fmt_name, config in iteritems(outputs):
                    fod = self.get_output_format(fmt_name, config)
                    output_formats.append(fod)
                self.log.debug(
                    "Converting to all specified output formats: %s" %
                    [fmt.name for fmt in output_formats])
        else:
            self._ensure_valid_output(output)
            output_formats = [self._outputs[output]]

        for final_format in output_formats:
            self.log.info("Converting document %s to %s", filename,
                          final_format.name)
            # TODO: build a proper way to specify final output...

            md_temp = TemporaryOutputDocument(fileoutputs=outputdir_name,
                                              export_config=final_format,
                                              log=self.log,
                                              parent=self)

            # get the temporary md file
            self.convert(parsed, md_temp)
            if final_format.keep_md or self.keep_md:
                mdfilename = basename + "." + final_format.name + ".md"
                self.log.info("Saving the temporary markdown as '%s'." %
                              mdfilename)
                # TODO: remove the first yaml metadata block and
                # put "#<title>\n<author>\n<date>" before the rest
                with codecs.open(mdfilename, 'w+b', 'UTF-8') as f:
                    f.write(md_temp.content)

            # convert the md file to the final filetype
            input_format = "markdown" \
                           "+autolink_bare_uris" \
                           "+ascii_identifiers" \
                           "+tex_math_single_backslash-implicit_figures" \
                           "+fenced_code_attributes"

            extra = [
                "--smart",  # typographically correct output (curly quotes, etc)
                "--email-obfuscation",
                "none",  #do not obfuscation email names with javascript
                "--self-contained",  # include img/scripts as data urls
                "--standalone",  # html with header + footer
                "--section-divs",
            ]
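            # Roughly the equivalent pandoc command line (a sketch; exact flags
            # depend on the pandoc version this was written against):
            #   pandoc --from markdown+autolink_bare_uris+... --to <format> --smart \
            #          --email-obfuscation none --self-contained --standalone \
            #          --section-divs -o <basename>.<ext>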

            outfilename = basename + "." + final_format.file_extension

            # exported is irrelevant, as we pass in a filename
            exported = pandoc(source=md_temp.content,
                              to=final_format.pandoc_export_format,
                              format=input_format,
                              extra_args=extra,
                              outputfile=outfilename)
            self.log.info("Written final output: %s" % outfilename)
            converted_docs.append(os.path.join(basedir, outfilename))
        if needs_chdir:
            os.chdir(orig_cwd)
        return converted_docs

    def _ensure_valid_output(self, fmt_name):
        if fmt_name in self._outputs:
            return
        raise KnitpyException("Format '%s' is not a valid output format!" %
                              fmt_name)
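Putting the pieces together, a minimal usage sketch of the Knitpy class above; the import path, file name and timeout value are assumptions, and valid output format names come from VALID_OUTPUT_FORMAT_NAMES, which is not shown here:

# a minimal sketch -- assumes knitpy and pandoc are installed
from knitpy.knitpy import Knitpy

kp = Knitpy()
kp.timeout = 30  # allow longer-running code chunks (config trait defined above)

# convert to the default export format...
converted = kp.render("report.pymd")

# ...or to every format listed in the document's yaml metadata
converted = kp.render("report.pymd", output="all")
print(converted)  # paths of the produced documents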