Example #1
class SyntheticExchangeConfig(ExchangeConfig):
    exchange_type = Instance(ExchangeType)
    adversaries = List(default_value=[])

    direction = Float(default_value=.5)
    volatility = Float(default_value=200)
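
The traits above follow the standard traitlets pattern: each class attribute declares a typed, validated field with a default that can be overridden at construction time or through configuration. A self-contained sketch of the same pattern (DemoConfig is illustrative; ExchangeConfig and ExchangeType come from the host project):

from traitlets import Float, HasTraits, List

# Hypothetical stand-in for the project's ExchangeConfig base class.
class DemoConfig(HasTraits):
    direction = Float(default_value=.5)    # drift of the synthetic price
    volatility = Float(default_value=200)  # magnitude of random moves
    adversaries = List(default_value=[])

cfg = DemoConfig(volatility=50)
print(cfg.direction, cfg.volatility)  # -> 0.5 50.0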
Example #2
class KernelManager(ConnectionFileMixin):
    """Manages a single kernel in a subprocess on this host.

    This version starts kernels with Popen.
    """

    # The PyZMQ Context to use for communication with the kernel.
    context = Instance(zmq.Context)

    def _context_default(self):
        return zmq.Context.instance()

    # the class to create with our `client` method
    client_class = DottedObjectName(
        'jupyter_client.blocking.BlockingKernelClient')
    client_factory = Type(klass='jupyter_client.KernelClient')

    def _client_factory_default(self):
        return import_item(self.client_class)

    def _client_class_changed(self, name, old, new):
        self.client_factory = import_item(str(new))

    # The kernel process with which the KernelManager is communicating.
    # generally a Popen instance
    kernel = Any()

    kernel_spec_manager = Instance(kernelspec.KernelSpecManager)

    def _kernel_spec_manager_default(self):
        return kernelspec.KernelSpecManager(data_dir=self.data_dir)

    def _kernel_spec_manager_changed(self):
        self._kernel_spec = None

    shutdown_wait_time = Float(
        5.0,
        config=True,
        help="Time to wait for a kernel to terminate before killing it, "
        "in seconds.")

    kernel_name = Unicode(kernelspec.NATIVE_KERNEL_NAME)

    def _kernel_name_changed(self, name, old, new):
        self._kernel_spec = None
        if new == 'python':
            self.kernel_name = kernelspec.NATIVE_KERNEL_NAME

    _kernel_spec = None

    @property
    def kernel_spec(self):
        if self._kernel_spec is None:
            self._kernel_spec = self.kernel_spec_manager.get_kernel_spec(
                self.kernel_name)
        return self._kernel_spec

    kernel_cmd = List(Unicode(),
                      config=True,
                      help="""DEPRECATED: Use kernel_name instead.

        The Popen Command to launch the kernel.
        Override this if you have a custom kernel.
        If kernel_cmd is specified in a configuration file,
        Jupyter does not pass any arguments to the kernel,
        because it cannot make any assumptions about the
        arguments that the kernel understands. In particular,
        this means that the kernel does not receive the
        option --debug if it is given on the Jupyter command line.
        """)

    def _kernel_cmd_changed(self, name, old, new):
        warnings.warn("Setting kernel_cmd is deprecated, use kernel_spec to "
                      "start different kernels.")

    @property
    def ipykernel(self):
        return self.kernel_name in {'python', 'python2', 'python3'}

    # Protected traits
    _launch_args = Any()
    _control_socket = Any()

    _restarter = Any()

    autorestart = Bool(True,
                       config=True,
                       help="""Should we autorestart the kernel if it dies.""")

    def __del__(self):
        self._close_control_socket()
        self.cleanup_connection_file()

    #--------------------------------------------------------------------------
    # Kernel restarter
    #--------------------------------------------------------------------------

    def start_restarter(self):
        pass

    def stop_restarter(self):
        pass

    def add_restart_callback(self, callback, event='restart'):
        """register a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.add_callback(callback, event)

    def remove_restart_callback(self, callback, event='restart'):
        """unregister a callback to be called when a kernel is restarted"""
        if self._restarter is None:
            return
        self._restarter.remove_callback(callback, event)

    #--------------------------------------------------------------------------
    # create a Client connected to our Kernel
    #--------------------------------------------------------------------------

    def client(self, **kwargs):
        """Create a client configured to connect to our kernel"""
        kw = {}
        kw.update(self.get_connection_info(session=True))
        kw.update(dict(
            connection_file=self.connection_file,
            parent=self,
        ))

        # add kwargs last, for manual overrides
        kw.update(kwargs)
        return self.client_factory(**kw)

    #--------------------------------------------------------------------------
    # Kernel management
    #--------------------------------------------------------------------------

    def format_kernel_cmd(self, extra_arguments=None):
        """replace templated args (e.g. {connection_file})"""
        extra_arguments = extra_arguments or []
        if self.kernel_cmd:
            cmd = self.kernel_cmd + extra_arguments
        else:
            cmd = self.kernel_spec.argv + extra_arguments

        if cmd and cmd[0] in {
                'python',
                'python%i' % sys.version_info[0],
                'python%i.%i' % sys.version_info[:2]
        }:
            # executable is 'python' or 'python3', use sys.executable.
            # These will typically be the same, but if the current process
            # is in an env and was launched via its absolute path without
            # activating the env, the python on PATH may not be
            # sys.executable, though it should be.
            cmd[0] = sys.executable

        ns = dict(
            connection_file=self.connection_file,
            prefix=sys.prefix,
        )

        if self.kernel_spec:
            ns["resource_dir"] = self.kernel_spec.resource_dir

        ns.update(self._launch_args)

        pat = re.compile(r'\{([A-Za-z0-9_]+)\}')

        def from_ns(match):
            """Get the key out of ns if it's there, otherwise no change."""
            return ns.get(match.group(1), match.group())

        return [pat.sub(from_ns, arg) for arg in cmd]
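
    # Illustration (hypothetical spec): with kernel_spec.argv set to
    #   ['python', '-m', 'ipykernel_launcher', '-f', '{connection_file}'],
    # from_ns() substitutes the real connection-file path for
    # '{connection_file}' and leaves unrecognized placeholders untouched.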

    def _launch_kernel(self, kernel_cmd, **kw):
        """actually launch the kernel

        override in a subclass to launch kernel subprocesses differently
        """
        return launch_kernel(kernel_cmd, **kw)

    # Control socket used for polite kernel shutdown

    def _connect_control_socket(self):
        if self._control_socket is None:
            self._control_socket = self.connect_control()
            self._control_socket.linger = 100

    def _close_control_socket(self):
        if self._control_socket is None:
            return
        self._control_socket.close()
        self._control_socket = None

    def start_kernel(self, **kw):
        """Starts a kernel on this host in a separate process.

        If random ports (port=0) are being used, this method must be called
        before the channels are created.

        Parameters
        ----------
        `**kw` : optional
             keyword arguments that are passed down to build the kernel_cmd
             and to launch the kernel (e.g. Popen kwargs).
        """
        if self.transport == 'tcp' and not is_local_ip(self.ip):
            raise RuntimeError(
                "Can only launch a kernel on a local interface. "
                "Make sure that the '*_address' attributes are "
                "configured properly. "
                "Currently valid addresses are: %s" % local_ips())

        # write connection file / get default ports
        self.write_connection_file()

        # save kwargs for use in restart
        self._launch_args = kw.copy()
        # build the Popen cmd
        extra_arguments = kw.pop('extra_arguments', [])
        kernel_cmd = self.format_kernel_cmd(extra_arguments=extra_arguments)
        env = kw.pop('env', os.environ).copy()
        # Don't allow PYTHONEXECUTABLE to be passed to kernel process.
        # If set, it can bork all the things.
        env.pop('PYTHONEXECUTABLE', None)
        if not self.kernel_cmd:
            # If kernel_cmd has been set manually, don't refer to a kernel spec
            # Environment variables from kernel spec are added to os.environ
            env.update(self.kernel_spec.env or {})

        # launch the kernel subprocess
        self.log.debug("Starting kernel: %s", kernel_cmd)
        self.kernel = self._launch_kernel(kernel_cmd, env=env, **kw)
        self.start_restarter()
        self._connect_control_socket()

    def request_shutdown(self, restart=False):
        """Send a shutdown request via control channel
        """
        content = dict(restart=restart)
        msg = self.session.msg("shutdown_request", content=content)
        # ensure control socket is connected
        self._connect_control_socket()
        self.session.send(self._control_socket, msg)

    def finish_shutdown(self, waittime=None, pollinterval=0.1):
        """Wait for kernel shutdown, then kill process if it doesn't shutdown.

        This does not send shutdown requests - use :meth:`request_shutdown`
        first.
        """
        if waittime is None:
            waittime = max(self.shutdown_wait_time, 0)
        for i in range(int(waittime / pollinterval)):
            if self.is_alive():
                time.sleep(pollinterval)
            else:
                break
        else:
            # OK, we've waited long enough.
            if self.has_kernel:
                self.log.debug("Kernel is taking too long to finish, killing")
                self._kill_kernel()

    def cleanup(self, connection_file=True):
        """Clean up resources when the kernel is shut down"""
        if connection_file:
            self.cleanup_connection_file()

        self.cleanup_ipc_files()
        self._close_control_socket()

    def shutdown_kernel(self, now=False, restart=False):
        """Attempts to stop the kernel process cleanly.

        This attempts to shut down the kernel cleanly by:

        1. Sending it a shutdown message over the shell channel.
        2. If that fails, shutting the kernel down forcibly by sending it
           a signal.

        Parameters
        ----------
        now : bool
            Should the kernel be forcibly killed *now*? This skips the
            first, polite shutdown attempt.
        restart : bool
            Will this kernel be restarted after it is shut down? When this
            is True, connection files will not be cleaned up.
        """
        # Stop monitoring for restarting while we shutdown.
        self.stop_restarter()

        if now:
            self._kill_kernel()
        else:
            self.request_shutdown(restart=restart)
            # Don't send any additional kernel kill messages immediately, to give
            # the kernel a chance to properly execute shutdown actions. Wait for
            # at most shutdown_wait_time seconds, checking every 0.1s.
            self.finish_shutdown()

        self.cleanup(connection_file=not restart)

    def restart_kernel(self, now=False, newports=False, **kw):
        """Restarts a kernel with the arguments that were used to launch it.

        Parameters
        ----------
        now : bool, optional
            If True, the kernel is forcefully restarted *immediately*, without
            having a chance to do any cleanup action.  Otherwise the kernel is
            given shutdown_wait_time seconds to clean up before a forceful
            restart is issued.

            In all cases the kernel is restarted, the only difference is whether
            it is given a chance to perform a clean shutdown or not.

        newports : bool, optional
            If the old kernel was launched with random ports, this flag decides
            whether the same ports and connection file will be used again.
            If False, the same ports and connection file are used. This is
            the default. If True, new random port numbers are chosen and a
            new connection file is written. It is still possible that the newly
            chosen random port numbers happen to be the same as the old ones.

        `**kw` : optional
            Any options specified here will overwrite those used to launch the
            kernel.
        """
        if self._launch_args is None:
            raise RuntimeError("Cannot restart the kernel. "
                               "No previous call to 'start_kernel'.")
        else:
            # Stop currently running kernel.
            self.shutdown_kernel(now=now, restart=True)

            if newports:
                self.cleanup_random_ports()

            # Start new kernel.
            self._launch_args.update(kw)
            self.start_kernel(**self._launch_args)

    @property
    def has_kernel(self):
        """Has a kernel been started that we are managing."""
        return self.kernel is not None

    def _kill_kernel(self):
        """Kill the running kernel.

        This is a private method, callers should use shutdown_kernel(now=True).
        """
        if self.has_kernel:

            # Signal the kernel to terminate (sends SIGKILL on Unix and calls
            # TerminateProcess() on Win32).
            try:
                if hasattr(signal, 'SIGKILL'):
                    self.signal_kernel(signal.SIGKILL)
                else:
                    self.kernel.kill()
            except OSError as e:
                # In Windows, we will get an Access Denied error if the process
                # has already terminated. Ignore it.
                if sys.platform == 'win32':
                    if e.winerror != 5:
                        raise
                # On Unix, we may get an ESRCH error if the process has already
                # terminated. Ignore it.
                else:
                    from errno import ESRCH
                    if e.errno != ESRCH:
                        raise

            # Block until the kernel terminates.
            self.kernel.wait()
            self.kernel = None
        else:
            raise RuntimeError("Cannot kill kernel. No kernel is running!")

    def interrupt_kernel(self):
        """Interrupts the kernel by sending it a signal.

        Unlike ``signal_kernel``, this operation is well supported on all
        platforms.
        """
        if self.has_kernel:
            interrupt_mode = self.kernel_spec.interrupt_mode
            if interrupt_mode == 'signal':
                if sys.platform == 'win32':
                    from .win_interrupt import send_interrupt
                    send_interrupt(self.kernel.win32_interrupt_event)
                else:
                    self.signal_kernel(signal.SIGINT)

            elif interrupt_mode == 'message':
                msg = self.session.msg("interrupt_request", content={})
                self._connect_control_socket()
                self.session.send(self._control_socket, msg)
        else:
            raise RuntimeError(
                "Cannot interrupt kernel. No kernel is running!")

    def signal_kernel(self, signum):
        """Sends a signal to the process group of the kernel (this
        usually includes the kernel and any subprocesses spawned by
        the kernel).

        Note that since only SIGTERM is supported on Windows, this function is
        only useful on Unix systems.
        """
        if self.has_kernel:
            if hasattr(os, "getpgid") and hasattr(os, "killpg"):
                try:
                    pgid = os.getpgid(self.kernel.pid)
                    os.killpg(pgid, signum)
                    return
                except OSError:
                    pass
            self.kernel.send_signal(signum)
        else:
            raise RuntimeError("Cannot signal kernel. No kernel is running!")

    def is_alive(self):
        """Is the kernel process still running?"""
        if self.has_kernel:
            if self.kernel.poll() is None:
                return True
            else:
                return False
        else:
            # we don't have a kernel
            return False
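
Taken together, these methods form a simple lifecycle: start the subprocess, talk to it through a client, then shut it down. A typical use via the public jupyter_client package (a minimal sketch; 'python3' assumes the native kernel spec is installed):

from jupyter_client import KernelManager

km = KernelManager(kernel_name='python3')
km.start_kernel()                     # launch the subprocess, write the connection file

kc = km.client()                      # BlockingKernelClient by default
kc.start_channels()
msg_id = kc.execute('print(6 * 7)')   # returns the request's msg_id
reply = kc.get_shell_msg(timeout=10)  # execute_reply for that msg_id

kc.stop_channels()
km.shutdown_kernel()                  # polite shutdown, then kill on timeout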
Example #3
class ZMQTerminalInteractiveShell(TerminalInteractiveShell):
    """A subclass of TerminalInteractiveShell that uses the 0MQ kernel"""
    _executing = False
    _execution_state = Unicode('')
    _pending_clearoutput = False
    kernel_banner = Unicode('')
    kernel_timeout = Float(
        60,
        config=True,
        help="""Timeout for giving up on a kernel (in seconds).

        On first connect and restart, the console tests whether the
        kernel is running and responsive by sending kernel_info_requests.
        This sets the timeout in seconds for how long the kernel can take
        before being presumed dead.
        """)

    image_handler = Enum(('PIL', 'stream', 'tempfile', 'callable'),
                         config=True,
                         allow_none=True,
                         help="""
        Handler for image type output.  This is useful, for example,
        when connecting to the kernel in which pylab inline backend is
        activated.  There are four handlers defined.  'PIL': Use
        Python Imaging Library to popup image; 'stream': Use an
        external program to show the image.  Image will be fed into
        the STDIN of the program.  You will need to configure
        `stream_image_handler`; 'tempfile': Use an external program to
        show the image.  Image will be saved in a temporally file and
        the program is called with the temporally file.  You will need
        to configure `tempfile_image_handler`; 'callable': You can set
        any Python callable which is called with the image data.  You
        will need to configure `callable_image_handler`.
        """)

    stream_image_handler = List(config=True,
                                help="""
        Command to invoke an image viewer program when you are using
        'stream' image handler.  This option is a list of string where
        the first element is the command itself and reminders are the
        options for the command.  Raw image data is given as STDIN to
        the program.
        """)

    tempfile_image_handler = List(config=True,
                                  help="""
        Command to invoke an image viewer program when you are using
        'tempfile' image handler.  This option is a list of string
        where the first element is the command itself and reminders
        are the options for the command.  You can use {file} and
        {format} in the string to represent the location of the
        generated image file and image format.
        """)

    callable_image_handler = Any(config=True,
                                 help="""
        Callable object called via 'callable' image handler with one
        argument, `data`, which is `msg["content"]["data"]` where
        `msg` is the message from iopub channel.  For exmaple, you can
        find base64 encoded PNG data as `data['image/png']`.
        """)

    mime_preference = List(
        default_value=['image/png', 'image/jpeg', 'image/svg+xml'],
        config=True,
        help="""
        Preferred object representation MIME types, in order.  The first
        matching MIME type will be used.
        """)

    manager = Instance('jupyter_client.KernelManager', allow_none=True)
    client = Instance('jupyter_client.KernelClient', allow_none=True)

    def _client_changed(self, name, old, new):
        self.session_id = new.session.session

    session_id = Unicode()

    def init_completer(self):
        """Initialize the completion machinery.

        This creates completion machinery that can be used by client code,
        either interactively in-process (typically triggered by the readline
        library), programmatically (such as in test suites) or out-of-process
        (typically over the network by remote frontends).
        """
        from IPython.core.completerlib import (module_completer,
                                               magic_run_completer,
                                               cd_completer)

        self.Completer = ZMQCompleter(self, self.client, config=self.config)

        self.set_hook('complete_command', module_completer, str_key='import')
        self.set_hook('complete_command', module_completer, str_key='from')
        self.set_hook('complete_command', magic_run_completer, str_key='%run')
        self.set_hook('complete_command', cd_completer, str_key='%cd')

        # Only configure readline if we truly are using readline.  IPython can
        # do tab-completion over the network, in GUIs, etc, where readline
        # itself may be absent
        if self.has_readline:
            self.set_readline_completer()

    def run_cell(self, cell, store_history=True):
        """Run a complete IPython cell.

        Parameters
        ----------
        cell : str
          The code (including IPython code such as %magic functions) to run.
        store_history : bool
          If True, the raw and translated cell will be stored in IPython's
          history. For user code calling back into IPython's machinery, this
          should be set to False.
        """
        if (not cell) or cell.isspace():
            # pressing enter flushes any pending display
            self.handle_iopub()
            return

        # flush stale replies, which could have been ignored due to missed heartbeats
        while self.client.shell_channel.msg_ready():
            self.client.shell_channel.get_msg()
        # execute takes 'hidden', which is the inverse of store_hist
        msg_id = self.client.execute(cell, not store_history)

        # first thing is wait for any side effects (output, stdin, etc.)
        self._executing = True
        self._execution_state = "busy"
        while self._execution_state != 'idle' and self.client.is_alive():
            try:
                self.handle_input_request(msg_id, timeout=0.05)
            except Empty:
                # display intermediate print statements, etc.
                self.handle_iopub(msg_id)
            except ZMQError as e:
                # Carry on if polling was interrupted by a signal
                if e.errno != errno.EINTR:
                    raise

        # after all of that is done, wait for the execute reply
        while self.client.is_alive():
            try:
                self.handle_execute_reply(msg_id, timeout=0.05)
            except Empty:
                pass
            else:
                break
        self._executing = False

    #-----------------
    # message handlers
    #-----------------

    def handle_execute_reply(self, msg_id, timeout=None):
        msg = self.client.shell_channel.get_msg(block=False, timeout=timeout)
        if msg["parent_header"].get("msg_id", None) == msg_id:

            self.handle_iopub(msg_id)

            content = msg["content"]
            status = content['status']

            if status == 'aborted':
                self.write('Aborted\n')
                return
            elif status == 'ok':
                # handle payloads
                for item in content.get("payload", []):
                    source = item['source']
                    if source == 'page':
                        page.page(item['data']['text/plain'])
                    elif source == 'set_next_input':
                        self.set_next_input(item['text'])
                    elif source == 'ask_exit':
                        self.keepkernel = item.get('keepkernel', False)
                        self.ask_exit()

            elif status == 'error':
                pass

            self.execution_count = int(content["execution_count"] + 1)

    include_other_output = Bool(False,
                                config=True,
                                help="""Whether to include output from clients
        other than this one sharing the same kernel.

        Outputs are not displayed until enter is pressed.
        """)
    other_output_prefix = Unicode(
        "[remote] ",
        config=True,
        help="""Prefix to add to outputs coming from clients other than this one.

        Only relevant if include_other_output is True.
        """)

    def from_here(self, msg):
        """Return whether a message is from this session"""
        return msg['parent_header'].get("session",
                                        self.session_id) == self.session_id

    def include_output(self, msg):
        """Return whether we should include a given output message"""
        from_here = self.from_here(msg)
        if msg['msg_type'] == 'execute_input':
            # only echo inputs not from here
            return self.include_other_output and not from_here

        if self.include_other_output:
            return True
        else:
            return from_here

    def handle_iopub(self, msg_id=''):
        """Process messages on the IOPub channel

           This method consumes and processes messages on the IOPub channel,
           such as stdout, stderr, execute_result and status.

           It only displays output that is caused by this session.
        """
        while self.client.iopub_channel.msg_ready():
            sub_msg = self.client.iopub_channel.get_msg()
            msg_type = sub_msg['header']['msg_type']
            parent = sub_msg["parent_header"]

            if self.include_output(sub_msg):
                if msg_type == 'status':
                    self._execution_state = sub_msg["content"][
                        "execution_state"]
                elif msg_type == 'stream':
                    if sub_msg["content"]["name"] == "stdout":
                        if self._pending_clearoutput:
                            print("\r", file=io.stdout, end="")
                            self._pending_clearoutput = False
                        print(sub_msg["content"]["text"],
                              file=io.stdout,
                              end="")
                        io.stdout.flush()
                    elif sub_msg["content"]["name"] == "stderr":
                        if self._pending_clearoutput:
                            print("\r", file=io.stderr, end="")
                            self._pending_clearoutput = False
                        print(sub_msg["content"]["text"],
                              file=io.stderr,
                              end="")
                        io.stderr.flush()

                elif msg_type == 'execute_result':
                    if self._pending_clearoutput:
                        print("\r", file=io.stdout, end="")
                        self._pending_clearoutput = False
                    self.execution_count = int(
                        sub_msg["content"]["execution_count"])
                    if not self.from_here(sub_msg):
                        sys.stdout.write(self.other_output_prefix)
                    format_dict = sub_msg["content"]["data"]
                    self.handle_rich_data(format_dict)

                    # taken from DisplayHook.__call__:
                    hook = self.displayhook
                    hook.start_displayhook()
                    hook.write_output_prompt()
                    hook.write_format_data(format_dict)
                    hook.log_output(format_dict)
                    hook.finish_displayhook()

                elif msg_type == 'display_data':
                    data = sub_msg["content"]["data"]
                    handled = self.handle_rich_data(data)
                    if not handled:
                        if not self.from_here(sub_msg):
                            sys.stdout.write(self.other_output_prefix)
                        # if it was an image, we handled it by now
                        if 'text/plain' in data:
                            print(data['text/plain'])

                elif msg_type == 'execute_input':
                    content = sub_msg['content']
                    self.execution_count = content['execution_count']
                    if not self.from_here(sub_msg):
                        sys.stdout.write(self.other_output_prefix)
                    sys.stdout.write(self.prompt_manager.render('in'))
                    sys.stdout.write(content['code'])

                elif msg_type == 'clear_output':
                    if sub_msg["content"]["wait"]:
                        self._pending_clearoutput = True
                    else:
                        print("\r", file=io.stdout, end="")

                elif msg_type == 'error':
                    for frame in sub_msg["content"]["traceback"]:
                        print(frame, file=io.stderr)

    _imagemime = {
        'image/png': 'png',
        'image/jpeg': 'jpeg',
        'image/svg+xml': 'svg',
    }

    def handle_rich_data(self, data):
        for mime in self.mime_preference:
            if mime in data and mime in self._imagemime:
                self.handle_image(data, mime)
                return True

    def handle_image(self, data, mime):
        handler = getattr(self, 'handle_image_{0}'.format(self.image_handler),
                          None)
        if handler:
            handler(data, mime)

    def handle_image_PIL(self, data, mime):
        if mime not in ('image/png', 'image/jpeg'):
            return
        import PIL.Image
        # base64.decodestring was removed in Python 3.9; decodebytes is the
        # equivalent replacement.
        raw = base64.decodebytes(data[mime].encode('ascii'))
        img = PIL.Image.open(BytesIO(raw))
        img.show()

    def handle_image_stream(self, data, mime):
        raw = base64.decodebytes(data[mime].encode('ascii'))
        imageformat = self._imagemime[mime]
        fmt = dict(format=imageformat)
        args = [s.format(**fmt) for s in self.stream_image_handler]
        with open(os.devnull, 'w') as devnull:
            proc = subprocess.Popen(args,
                                    stdin=subprocess.PIPE,
                                    stdout=devnull,
                                    stderr=devnull)
            proc.communicate(raw)

    def handle_image_tempfile(self, data, mime):
        raw = base64.decodebytes(data[mime].encode('ascii'))
        imageformat = self._imagemime[mime]
        filename = 'tmp.{0}'.format(imageformat)
        with NamedFileInTemporaryDirectory(filename) as f, \
                    open(os.devnull, 'w') as devnull:
            f.write(raw)
            f.flush()
            fmt = dict(file=f.name, format=imageformat)
            args = [s.format(**fmt) for s in self.tempfile_image_handler]
            subprocess.call(args, stdout=devnull, stderr=devnull)

    def handle_image_callable(self, data, mime):
        self.callable_image_handler(data)

    def handle_input_request(self, msg_id, timeout=0.1):
        """ Method to capture raw_input
        """
        req = self.client.stdin_channel.get_msg(timeout=timeout)
        # in case any iopub came while we were waiting:
        self.handle_iopub(msg_id)
        if msg_id == req["parent_header"].get("msg_id"):
            # wrap SIGINT handler
            real_handler = signal.getsignal(signal.SIGINT)

            def double_int(sig, frame):
                # call real handler (forwards sigint to kernel),
                # then raise local interrupt, stopping local raw_input
                real_handler(sig, frame)
                raise KeyboardInterrupt

            signal.signal(signal.SIGINT, double_int)
            content = req['content']
            read = getpass if content.get('password', False) else input
            try:
                raw_data = read(content["prompt"])
            except EOFError:
                # turn EOFError into EOF character
                raw_data = '\x04'
            except KeyboardInterrupt:
                sys.stdout.write('\n')
                return
            finally:
                # restore SIGINT handler
                signal.signal(signal.SIGINT, real_handler)

            # only send stdin reply if there *was not* another request
            # or execution finished while we were reading.
            if not (self.client.stdin_channel.msg_ready()
                    or self.client.shell_channel.msg_ready()):
                self.client.input(raw_data)

    def mainloop(self, display_banner=False):
        self.keepkernel = False
        while True:
            try:
                self.interact(display_banner=display_banner)
                #self.interact_with_readline()
                # XXX for testing of a readline-decoupled repl loop, call
                # interact_with_readline above
                break
            except KeyboardInterrupt:
                # this should not be necessary, but KeyboardInterrupt
                # handling seems rather unpredictable...
                self.write("\nKeyboardInterrupt in interact()\n")

        if self.keepkernel and not self.own_kernel:
            print('keeping kernel alive')
        elif self.keepkernel and self.own_kernel:
            print("owning kernel, cannot keep it alive")
            self.client.shutdown()
        else:
            print("Shutting down kernel")
            self.client.shutdown()

    def _banner1_default(self):
        return "Jupyter Console {version}\n".format(version=__version__)

    def compute_banner(self):
        super(ZMQTerminalInteractiveShell, self).compute_banner()
        if self.client and not self.kernel_banner:
            msg_id = self.client.kernel_info()
            while True:
                try:
                    reply = self.client.get_shell_msg(timeout=1)
                except Empty:
                    break
                else:
                    if reply['parent_header'].get('msg_id') == msg_id:
                        self.kernel_banner = reply['content'].get('banner', '')
                        break
        self.banner += self.kernel_banner

    def wait_for_kernel(self, timeout=None):
        """method to wait for a kernel to be ready"""
        tic = time.time()
        self.client.hb_channel.unpause()
        while True:
            msg_id = self.client.kernel_info()
            reply = None
            while True:
                try:
                    reply = self.client.get_shell_msg(timeout=1)
                except Empty:
                    break
                else:
                    if reply['parent_header'].get('msg_id') == msg_id:
                        return True
            if timeout is not None \
                and (time.time() - tic) > timeout \
                and not self.client.hb_channel.is_beating():
                # heart failed
                return False
        return True

    def interact(self, display_banner=None):
        """Closely emulate the interactive Python console."""

        # batch run -> do not interact
        if self.exit_now:
            return

        if display_banner is None:
            display_banner = self.display_banner

        if isinstance(display_banner, string_types):
            self.show_banner(display_banner)
        elif display_banner:
            self.show_banner()

        more = False

        # run a non-empty no-op, so that we don't get a prompt until
        # we know the kernel is ready. This keeps the connection
        # message above the first prompt.
        if not self.wait_for_kernel(self.kernel_timeout):
            error("Kernel did not respond\n")
            return

        if self.has_readline:
            self.readline_startup_hook(self.pre_readline)
            hlen_b4_cell = self.readline.get_current_history_length()
        else:
            hlen_b4_cell = 0
        # exit_now is set by a call to %Exit or %Quit, through the
        # ask_exit callback.

        while not self.exit_now:
            if not self.client.is_alive():
                # kernel died, prompt for action or exit

                action = "restart" if self.manager else "wait for restart"
                ans = self.ask_yes_no("kernel died, %s ([y]/n)?" % action,
                                      default='y')
                if ans:
                    if self.manager:
                        self.manager.restart_kernel(True)
                    self.wait_for_kernel(self.kernel_timeout)
                else:
                    self.exit_now = True
                continue
            try:
                # protect prompt block from KeyboardInterrupt
                # when sitting on ctrl-C
                self.hooks.pre_prompt_hook()
                if more:
                    try:
                        prompt = self.prompt_manager.render('in2')
                    except Exception:
                        self.showtraceback()
                    if self.autoindent:
                        self.rl_do_indent = True

                else:
                    try:
                        prompt = self.separate_in + self.prompt_manager.render(
                            'in')
                    except Exception:
                        self.showtraceback()

                line = self.raw_input(prompt)
                if self.exit_now:
                    # quick exit on sys.std[in|out] close
                    break
                if self.autoindent:
                    self.rl_do_indent = False

            except KeyboardInterrupt:
                #double-guard against keyboardinterrupts during kbdint handling
                try:
                    self.write('\n' + self.get_exception_only())
                    source_raw = self.input_splitter.raw_reset()
                    hlen_b4_cell = self._replace_rlhist_multiline(
                        source_raw, hlen_b4_cell)
                    more = False
                except KeyboardInterrupt:
                    pass
            except EOFError:
                if self.autoindent:
                    self.rl_do_indent = False
                    if self.has_readline:
                        self.readline_startup_hook(None)
                self.write('\n')
                self.exit()
            except bdb.BdbQuit:
                warn(
                    'The Python debugger has exited with a BdbQuit exception.\n'
                    'Because of how pdb handles the stack, it is impossible\n'
                    'for IPython to properly format this particular exception.\n'
                    'IPython will resume normal operation.')
            except:
                # exceptions here are VERY RARE, but they can be triggered
                # asynchronously by signal handlers, for example.
                self.showtraceback()
            else:
                try:
                    self.input_splitter.push(line)
                    more = self.input_splitter.push_accepts_more()
                except SyntaxError:
                    # Run the code directly - run_cell takes care of displaying
                    # the exception.
                    more = False
                if (self.SyntaxTB.last_syntax_error and self.autoedit_syntax):
                    self.edit_syntax_error()
                if not more:
                    source_raw = self.input_splitter.raw_reset()
                    hlen_b4_cell = self._replace_rlhist_multiline(
                        source_raw, hlen_b4_cell)
                    self.run_cell(source_raw)

        # Turn off the exit flag, so the mainloop can be restarted if desired
        self.exit_now = False

    def init_history(self):
        """Sets up the command history. """
        self.history_manager = ZMQHistoryManager(client=self.client)
        self.configurables.append(self.history_manager)
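
The image-handler and timeout traits above are normally set through Jupyter's configuration system rather than in code. A sketch of a jupyter_console_config.py fragment (the viewer command 'xdg-open' is illustrative; substitute any local image viewer):

c = get_config()  # provided by the Jupyter config loader

# Show images by writing them to a temp file and launching a viewer.
c.ZMQTerminalInteractiveShell.image_handler = 'tempfile'
c.ZMQTerminalInteractiveShell.tempfile_image_handler = ['xdg-open', '{file}']

# Give slow kernels more time before they are presumed dead.
c.ZMQTerminalInteractiveShell.kernel_timeout = 120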
Example #4
class Plot(widgets.DOMWidget):
    """
    Main K3D widget.

    Attributes:
        antialias: `int`.
            Enable antialiasing in WebGL renderer, changes have no effect after displaying.
        height: `int`.
            Height of the widget in pixels, changes have no effect after displaying.
        background_color: `int`.
            Packed RGB color of the plot background (0xff0000 is red, 0xff is blue), -1 is for transparent.
        camera_auto_fit: `bool`.
            Enable automatic camera setting after adding, removing or changing a plot object.
        grid_auto_fit: `bool`.
            Enable automatic adjustment of the plot grid to contained objects.
        grid_visible: `bool`.
            Enable or disable grid.
        screenshot_scale: `Float`.
            Multiplier applied to the screenshot resolution.
        voxel_paint_color: `int`.
            The (initial) int value to be inserted when editing voxels.
        grid: `array_like`.
            6-element tuple specifying the bounds of the plot grid (x0, y0, z0, x1, y1, z1).
        camera: `array_like`.
            9-element list or array specifying camera position.
        camera_no_rotate: `Bool`.
            Lock for camera rotation.
        camera_no_zoom: `Bool`.
            Lock for camera zoom.
        camera_no_pan: `Bool`.
            Lock for camera pan.
        axes: `list`.
            Axes labels for plot.
        objects: `list`.
            List of `k3d.objects.Drawable` currently included in the plot, not to be changed directly.
    """

    _view_name = Unicode('PlotView').tag(sync=True)
    _model_name = Unicode('PlotModel').tag(sync=True)
    _view_module = Unicode('k3d').tag(sync=True)
    _model_module = Unicode('k3d').tag(sync=True)

    _view_module_version = Unicode(version).tag(sync=True)
    _model_module_version = Unicode(version).tag(sync=True)
    _backend_version = Unicode(version).tag(sync=True)

    # readonly (specified at creation)
    antialias = Int(min=0, max=5).tag(sync=True)
    height = Int().tag(sync=True)

    # readonly (not to be modified directly)
    object_ids = List().tag(sync=True)

    # read-write
    camera_auto_fit = Bool(True).tag(sync=True)
    lighting = Float().tag(sync=True)
    grid_auto_fit = Bool(True).tag(sync=True)
    grid_visible = Bool(True).tag(sync=True)
    fps_meter = Bool(True).tag(sync=True)
    menu_visibility = Bool(True).tag(sync=True)
    screenshot_scale = Float().tag(sync=True)
    time = Float().tag(sync=True)
    grid = ListOrArray((-1, -1, -1, 1, 1, 1), minlen=6,
                       maxlen=6).tag(sync=True)
    background_color = Int().tag(sync=True)
    voxel_paint_color = Int().tag(sync=True)
    camera = ListOrArray(minlen=9, maxlen=9, empty_ok=True).tag(sync=True)
    camera_no_rotate = Bool(False).tag(sync=True)
    camera_no_zoom = Bool(False).tag(sync=True)
    camera_no_pan = Bool(False).tag(sync=True)
    clipping_planes = ListOrArray(empty_ok=True).tag(sync=True)
    colorbar_object_id = Int(-1).tag(sync=True)
    rendering_steps = Int(1).tag(sync=True)
    screenshot = Unicode().tag(sync=True)
    axes = List(minlen=3, maxlen=3, default_value=['x', 'y',
                                                   'z']).tag(sync=True)

    objects = []

    def __init__(self,
                 antialias=3,
                 background_color=0xFFFFFF,
                 camera_auto_fit=True,
                 grid_auto_fit=True,
                 grid_visible=True,
                 height=512,
                 voxel_paint_color=0,
                 grid=(-1, -1, -1, 1, 1, 1),
                 screenshot_scale=2.0,
                 lighting=1.0,
                 time=0.0,
                 fps_meter=False,
                 menu_visibility=True,
                 colorbar_object_id=-1,
                 rendering_steps=1,
                 axes=['x', 'y', 'z'],
                 camera_no_rotate=False,
                 camera_no_zoom=False,
                 camera_no_pan=False,
                 *args,
                 **kwargs):
        super(Plot, self).__init__()

        self.antialias = antialias
        self.camera_auto_fit = camera_auto_fit
        self.grid_auto_fit = grid_auto_fit
        self.fps_meter = fps_meter
        self.grid = grid
        self.grid_visible = grid_visible
        self.background_color = background_color
        self.voxel_paint_color = voxel_paint_color
        self.screenshot_scale = screenshot_scale
        self.height = height
        self.lighting = lighting
        self.time = time
        self.menu_visibility = menu_visibility
        self.colorbar_object_id = colorbar_object_id
        self.rendering_steps = rendering_steps
        self.camera_no_rotate = camera_no_rotate
        self.camera_no_zoom = camera_no_zoom
        self.camera_no_pan = camera_no_pan
        self.axes = axes
        self.camera = [4.5, 4.5, 4.5, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0]

        self.object_ids = []
        self.objects = []

        self.outputs = []

    def __iadd__(self, objs):
        assert isinstance(objs, Drawable)

        for obj in objs:
            if obj.id not in self.object_ids:
                self.object_ids = self.object_ids + [obj.id]
                self.objects.append(obj)

        return self

    def __isub__(self, objs):
        assert isinstance(objs, Drawable)

        for obj in objs:
            self.object_ids = [id_ for id_ in self.object_ids if id_ != obj.id]
            if obj in self.objects:
                self.objects.remove(obj)

        return self

    def display(self, **kwargs):
        output = widgets.Output()

        with output:
            display(self, **kwargs)

        self.outputs.append(output)

        display(output)

    def close(self):
        for output in self.outputs:
            output.clear_output()

        self.outputs = []

    def reset_camera(self):
        self.send({'msg_type': 'reset_camera'})

    def fetch_screenshot(self, only_canvas=False):
        self.send({'msg_type': 'fetch_screenshot', 'only_canvas': only_canvas})

    def yield_screenshots(self, generator_function):
        """Decorator for a generator function receiving screenshots via yield."""
        @wraps(generator_function)
        def inner():
            generator = generator_function()

            def send_new_value(change):
                try:
                    generator.send(base64.b64decode(change.new))
                except StopIteration:
                    self.unobserve(send_new_value, 'screenshot')

            self.observe(send_new_value, 'screenshot')
            # start the decorated generator
            generator.send(None)

        return inner
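
In a notebook, this widget is usually created through the k3d factory helpers rather than instantiated directly. A short usage sketch (assumes the k3d Jupyter extension is installed; the random point cloud is illustrative):

import numpy as np
import k3d

plot = k3d.plot(background_color=0xFFFFFF, grid_visible=True)
points = k3d.points(np.random.randn(100, 3).astype(np.float32),
                    point_size=0.2)
plot += points   # Plot.__iadd__ registers the Drawable and its id
plot.display()   # render into an Output widget

plot -= points   # Plot.__isub__ removes it again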
Example #5
class Map(DOMWidget, InteractMixin):
    _view_name = Unicode('LeafletMapView').tag(sync=True)
    _model_name = Unicode('LeafletMapModel').tag(sync=True)
    _view_module = Unicode('jupyter-leaflet').tag(sync=True)
    _model_module = Unicode('jupyter-leaflet').tag(sync=True)

    _view_module_version = Unicode(EXTENSION_VERSION).tag(sync=True)
    _model_module_version = Unicode(EXTENSION_VERSION).tag(sync=True)

    # Map options
    center = List(def_loc).tag(sync=True, o=True)
    zoom_start = Int(12).tag(sync=True, o=True)
    zoom = Int(12).tag(sync=True, o=True)
    max_zoom = Int(18).tag(sync=True, o=True)
    min_zoom = Int(1).tag(sync=True, o=True)
    interpolation = Unicode('bilinear').tag(sync=True, o=True)
    crs = Enum(values=allowed_crs, default_value='EPSG3857').tag(sync=True)

    # Specification of the basemap
    basemap = Dict(default_value=dict(
        url='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
        max_zoom=19,
        attribution=
        'Map data (c) <a href="https://openstreetmap.org">OpenStreetMap</a> contributors'
    )).tag(sync=True, o=True)
    modisdate = Unicode('yesterday').tag(sync=True)

    # Interaction options
    dragging = Bool(True).tag(sync=True, o=True)
    touch_zoom = Bool(True).tag(sync=True, o=True)
    scroll_wheel_zoom = Bool(False).tag(sync=True, o=True)
    double_click_zoom = Bool(True).tag(sync=True, o=True)
    box_zoom = Bool(True).tag(sync=True, o=True)
    tap = Bool(True).tag(sync=True, o=True)
    tap_tolerance = Int(15).tag(sync=True, o=True)
    world_copy_jump = Bool(False).tag(sync=True, o=True)
    close_popup_on_click = Bool(True).tag(sync=True, o=True)
    bounce_at_zoom_limits = Bool(True).tag(sync=True, o=True)
    keyboard = Bool(True).tag(sync=True, o=True)
    keyboard_pan_offset = Int(80).tag(sync=True, o=True)
    keyboard_zoom_offset = Int(1).tag(sync=True, o=True)
    inertia = Bool(True).tag(sync=True, o=True)
    inertia_deceleration = Int(3000).tag(sync=True, o=True)
    inertia_max_speed = Int(1500).tag(sync=True, o=True)
    # inertia_threshold = Int(?, o=True).tag(sync=True)
    # fade_animation = Bool(?).tag(sync=True, o=True)
    # zoom_animation = Bool(?).tag(sync=True, o=True)
    zoom_animation_threshold = Int(4).tag(sync=True, o=True)
    # marker_zoom_animation = Bool(?).tag(sync=True, o=True)
    fullscreen = Bool(False).tag(sync=True, o=True)

    options = List(trait=Unicode()).tag(sync=True)

    style = InstanceDict(MapStyle).tag(sync=True, **widget_serialization)
    default_style = InstanceDict(MapStyle).tag(sync=True,
                                               **widget_serialization)
    dragging_style = InstanceDict(MapStyle).tag(sync=True,
                                                **widget_serialization)

    zoom_control = Bool(True)
    zoom_control_instance = ZoomControl()

    attribution_control = Bool(True)
    attribution_control_instance = AttributionControl(position='bottomright')

    @default('dragging_style')
    def _default_dragging_style(self):
        return {'cursor': 'move'}

    @default('options')
    def _default_options(self):
        return [name for name in self.traits(o=True)]

    south = Float(def_loc[0], read_only=True).tag(sync=True)
    north = Float(def_loc[0], read_only=True).tag(sync=True)
    east = Float(def_loc[1], read_only=True).tag(sync=True)
    west = Float(def_loc[1], read_only=True).tag(sync=True)

    layers = Tuple().tag(trait=Instance(Layer),
                         sync=True,
                         **widget_serialization)

    @default('layers')
    def _default_layers(self):
        return (basemap_to_tiles(self.basemap, self.modisdate, base=True), )

    bounds = Tuple(read_only=True)
    bounds_polygon = Tuple(read_only=True)

    @observe('south', 'north', 'east', 'west')
    def _observe_bounds(self, change):
        self.set_trait('bounds',
                       ((self.south, self.west), (self.north, self.east)))
        self.set_trait('bounds_polygon',
                       ((self.north, self.west), (self.north, self.east),
                        (self.south, self.east), (self.south, self.west)))

    def __init__(self, **kwargs):
        super(Map, self).__init__(**kwargs)
        self.on_displayed(self._fire_children_displayed)
        self.on_msg(self._handle_leaflet_event)

        if self.zoom_control:
            self.add_control(self.zoom_control_instance)

        if self.attribution_control:
            self.add_control(self.attribution_control_instance)

    @observe('zoom_control')
    def observe_zoom_control(self, change):
        if change['new']:
            self.add_control(self.zoom_control_instance)
        else:
            if self.zoom_control_instance in self.controls:
                self.remove_control(self.zoom_control_instance)

    @observe('attribution_control')
    def observe_attribution_control(self, change):
        if change['new']:
            self.add_control(self.attribution_control_instance)
        else:
            if self.attribution_control_instance in self.controls:
                self.remove_control(self.attribution_control_instance)

    def _fire_children_displayed(self, widget, **kwargs):
        for layer in self.layers:
            layer._handle_displayed(**kwargs)
        for control in self.controls:
            control._handle_displayed(**kwargs)

    _layer_ids = List()

    @validate('layers')
    def _validate_layers(self, proposal):
        '''Validate layers list.

        Makes sure only one instance of any given layer can exist in the
        layers list.
        '''
        self._layer_ids = [l.model_id for l in proposal.value]
        if len(set(self._layer_ids)) != len(self._layer_ids):
            raise LayerException(
                'duplicate layer detected, only use each layer once')
        return proposal.value

    def add_layer(self, layer):
        if isinstance(layer, dict):
            layer = basemap_to_tiles(layer)
        if layer.model_id in self._layer_ids:
            raise LayerException('layer already on map: %r' % layer)
        self.layers = tuple([l for l in self.layers] + [layer])

    def remove_layer(self, layer):
        if layer.model_id not in self._layer_ids:
            raise LayerException('layer not on map: %r' % layer)
        self.layers = tuple(
            [l for l in self.layers if l.model_id != layer.model_id])

    def substitute_layer(self, old, new):
        if isinstance(new, dict):
            new = basemap_to_tiles(new)
        if old.model_id not in self._layer_ids:
            raise LayerException(
                'Could not substitute layer: layer not on map.')
        self.layers = tuple(
            [new if l.model_id == old.model_id else l for l in self.layers])

    def clear_layers(self):
        self.layers = ()

    controls = Tuple().tag(trait=Instance(Control),
                           sync=True,
                           **widget_serialization)
    _control_ids = List()

    @validate('controls')
    def _validate_controls(self, proposal):
        '''Validate controls list.

        Makes sure only one instance of any given control can exist in the
        controls list.
        '''
        self._control_ids = [c.model_id for c in proposal.value]
        if len(set(self._control_ids)) != len(self._control_ids):
            raise ControlException(
                'duplicate control detected, only use each control once')
        return proposal.value

    def add_control(self, control):
        if control.model_id in self._control_ids:
            raise ControlException('control already on map: %r' % control)
        self.controls = tuple([c for c in self.controls] + [control])

    def remove_control(self, control):
        if control.model_id not in self._control_ids:
            raise ControlException('control not on map: %r' % control)
        self.controls = tuple(
            [c for c in self.controls if c.model_id != control.model_id])

    def clear_controls(self):
        self.controls = ()

    def __iadd__(self, item):
        if isinstance(item, Layer):
            self.add_layer(item)
        elif isinstance(item, Control):
            self.add_control(item)
        return self

    def __isub__(self, item):
        if isinstance(item, Layer):
            self.remove_layer(item)
        elif isinstance(item, Control):
            self.remove_control(item)
        return self

    def __add__(self, item):
        if isinstance(item, Layer):
            self.add_layer(item)
        elif isinstance(item, Control):
            self.add_control(item)
        return self

    # Event handling
    _interaction_callbacks = Instance(CallbackDispatcher, ())

    def _handle_leaflet_event(self, _, content, buffers):
        if content.get('event', '') == 'interaction':
            self._interaction_callbacks(**content)

    def on_interaction(self, callback, remove=False):
        self._interaction_callbacks.register_callback(callback, remove=remove)
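
Typical notebook use of this class goes through layers, controls, and the interaction callback registered above (a minimal sketch using ipyleaflet's public API; the coordinates are illustrative):

from ipyleaflet import Map, Marker

m = Map(center=(52.2297, 21.0122), zoom=10)
m += Marker(location=(52.2297, 21.0122))  # Map.__iadd__ -> add_layer

def handle_interaction(**kwargs):
    # Events arrive with at least 'type' and 'coordinates' keys.
    if kwargs.get('type') == 'mousemove':
        print(kwargs.get('coordinates'))

m.on_interaction(handle_interaction)
m  # display the widget as the cell result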
Example #6
class PodReflector(SingletonConfigurable):
    request_timeout = Float(
        60,
        help="""
        Network timeout for kubernetes watch.

        Trigger watch reconnect when a given request is taking too long,
        which can indicate network issues.
        """,
        config=True,
    )

    timeout_seconds = Int(
        10,
        help="""
        Timeout (in seconds) for kubernetes watch.

        Trigger watch reconnect when no watch event has been received.
        This will cause a full reload of the currently existing resources
        from the API server.
        """,
        config=True,
    )

    restart_seconds = Float(
        30,
        help="Maximum time (in seconds) before restarting a watch.",
        config=True)

    kube_client = Instance(kubernetes.client.CoreV1Api)
    label_selector = Unicode()
    namespace = Unicode()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.first_load_future = get_running_loop().create_future()
        self.stopped = False
        self.start()

    def __del__(self):
        self.stop()

    def _list_and_update(self):
        initial_resources = self.kube_client.list_namespaced_pod(
            self.namespace,
            label_selector=self.label_selector,
            _request_timeout=self.request_timeout,
        )
        self.pods = {p.metadata.name: p for p in initial_resources.items}
        # return the resource version so we can hook up a watch
        return initial_resources.metadata.resource_version

    def _watch_and_update(self):
        self.log.info("Starting pod watcher...")
        cur_delay = 0.1
        while not self.stopped:
            start = time.monotonic()
            watch = kubernetes.watch.Watch()
            try:
                resource_version = self._list_and_update()
                if not self.first_load_future.done():
                    # signal that we've loaded our initial data
                    self.first_load_future.set_result(None)
                kwargs = {
                    "namespace": self.namespace,
                    "label_selector": self.label_selector,
                    "resource_version": resource_version,
                    "_request_timeout": self.request_timeout,
                    "timeout_seconds": self.timeout_seconds,
                }
                for ev in watch.stream(self.kube_client.list_namespaced_pod,
                                       **kwargs):
                    cur_delay = 0.1
                    pod = ev["object"]
                    if ev["type"] == "DELETED":
                        self.pods.pop(pod.metadata.name, None)
                    else:
                        self.pods[pod.metadata.name] = pod
                    if self.stopped:
                        # Check in inner loop to provide faster shutdown
                        break
                    watch_duration = time.monotonic() - start
                    if watch_duration >= self.restart_seconds:
                        self.log.debug(
                            "Restarting pod watcher after %.1f seconds",
                            watch_duration)
                        break
            except ReadTimeoutError:
                # network read time out, just continue and restart the watch
                # this could be due to a network problem or just low activity
                self.log.warning("Read timeout watching pods, reconnecting")
                continue
            except Exception as exc:
                if cur_delay < 30:
                    cur_delay = cur_delay * 2
                self.log.error(
                    "Error when watching pods, retrying in %.1f seconds...",
                    cur_delay,
                    exc_info=exc,
                )
                time.sleep(cur_delay)
                continue
            else:
                # no events on watch, reconnect
                self.log.debug("Pod watcher timeout, restarting")
            finally:
                watch.stop()
        self.log.debug("Pod watcher stopped")

    def start(self):
        self.watch_thread = threading.Thread(target=self._watch_and_update,
                                             daemon=True)
        self.watch_thread.start()

    def stop(self):
        self.stopped = True
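
A minimal usage sketch for the reflector above (assumes a reachable cluster; the namespace and label selector are illustrative). Because `__init__` calls `get_running_loop()`, the reflector must be constructed while an asyncio event loop is running:

    import asyncio
    import kubernetes

    async def main():
        kubernetes.config.load_kube_config()  # or load_incluster_config()
        reflector = PodReflector(
            kube_client=kubernetes.client.CoreV1Api(),
            namespace='default',
            label_selector='component=singleuser-server',
        )
        await reflector.first_load_future  # wait for the initial pod list
        print(sorted(reflector.pods))
        reflector.stop()

    asyncio.run(main())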
Example No. 7
class TerminalInteractiveShell(InteractiveShell):
    mime_renderers = Dict().tag(config=True)

    space_for_menu = Integer(
        6,
        help='Number of lines at the bottom of the screen '
        'to reserve for the tab-completion menu, '
        'search history, etc.; the height of '
        'these menus will be at most this value. '
        'Increase it if you prefer long and skinny '
        'menus, decrease it for short and wide.').tag(config=True)

    pt_app = None
    debugger_history = None

    debugger_history_file = Unicode(
        "~/.pdbhistory",
        help="File in which to store and read history").tag(config=True)

    simple_prompt = Bool(
        _use_simple_prompt,
        help=
        """Use `raw_input` for the REPL, without completion and prompt colors.

            Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR. Known use cases include
            IPython's own testing machinery and emacs' inferior-shell integration through elpy.

            This mode defaults to `True` if the `IPY_TEST_SIMPLE_PROMPT`
            environment variable is set, or if the current terminal is not a tty."""
    ).tag(config=True)

    @property
    def debugger_cls(self):
        return Pdb if self.simple_prompt else TerminalPdb

    confirm_exit = Bool(
        True,
        help="""
        Set to confirm when you try to exit IPython with an EOF (Control-D
        in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
        you can force a direct exit without any confirmation.""",
    ).tag(config=True)

    editing_mode = Unicode(
        'emacs',
        help="Shortcut style to use at the prompt. 'vi' or 'emacs'.",
    ).tag(config=True)

    emacs_bindings_in_vi_insert_mode = Bool(
        True,
        help="Add shortcuts from 'emacs' insert mode to 'vi' insert mode.",
    ).tag(config=True)

    modal_cursor = Bool(
        True,
        help="""
       Cursor shape changes depending on vi mode: beam in vi insert mode,
       block in nav mode, underscore in replace mode.""",
    ).tag(config=True)

    ttimeoutlen = Float(
        0.01,
        help="""The time in seconds that is waited for a key code
        to complete.""",
    ).tag(config=True)

    timeoutlen = Float(
        0.5,
        help="""The time in seconds that is waited for a mapped key
        sequence to complete.""",
    ).tag(config=True)

    autoformatter = Unicode(
        None,
        help=
        "Autoformatter to reformat Terminal code. Can be `'black'` or `None`",
        allow_none=True).tag(config=True)

    mouse_support = Bool(
        False,
        help=
        "Enable mouse support in the prompt\n(Note: prevents selecting text with the mouse)"
    ).tag(config=True)

    # We don't load the list of styles for the help string, because loading
    # Pygments plugins takes time and can cause unexpected errors.
    highlighting_style = Union(
        [Unicode('legacy'), Type(klass=Style)],
        help="""The name or class of a Pygments style to use for syntax
        highlighting. To see available styles, run `pygmentize -L styles`."""
    ).tag(config=True)

    @validate('editing_mode')
    def _validate_editing_mode(self, proposal):
        if proposal['value'].lower() == 'vim':
            proposal['value'] = 'vi'
        elif proposal['value'].lower() == 'default':
            proposal['value'] = 'emacs'

        if hasattr(EditingMode, proposal['value'].upper()):
            return proposal['value'].lower()

        return self.editing_mode

    @observe('editing_mode')
    def _editing_mode(self, change):
        if self.pt_app:
            self.pt_app.editing_mode = getattr(EditingMode, change.new.upper())

    @observe('autoformatter')
    def _autoformatter_changed(self, change):
        formatter = change.new
        if formatter is None:
            self.reformat_handler = lambda x: x
        elif formatter == 'black':
            self.reformat_handler = black_reformat_handler
        else:
            raise ValueError(
                "Unknown autoformatter %r (expected 'black' or None)" %
                formatter)

    @observe('highlighting_style')
    @observe('colors')
    def _highlighting_style_changed(self, change):
        self.refresh_style()

    def refresh_style(self):
        self._style = self._make_style_from_name_or_cls(
            self.highlighting_style)

    highlighting_style_overrides = Dict(
        help="Override highlighting format for specific tokens").tag(
            config=True)

    true_color = Bool(
        False,
        help="""Use 24bit colors instead of 256 colors in prompt highlighting.
        If your terminal supports true color, the following command should
        print ``TRUECOLOR`` in orange::

            printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"
        """,
    ).tag(config=True)

    editor = Unicode(
        get_default_editor(),
        help="Set the editor used by IPython (defaults to $EDITOR/vi/notepad)."
    ).tag(config=True)

    prompts_class = Type(
        Prompts,
        help='Class used to generate Prompt token for prompt_toolkit').tag(
            config=True)

    prompts = Instance(Prompts)

    @default('prompts')
    def _prompts_default(self):
        return self.prompts_class(self)

#    @observe('prompts')
#    def _(self, change):
#        self._update_layout()

    @default('displayhook_class')
    def _displayhook_class_default(self):
        return RichPromptDisplayHook

    term_title = Bool(
        True, help="Automatically set the terminal title").tag(config=True)

    term_title_format = Unicode(
        "IPython: {cwd}",
        help=
        "Customize the terminal title format.  This is a python format string. "
        + "Available substitutions are: {cwd}.").tag(config=True)

    display_completions = Enum(
        ('column', 'multicolumn', 'readlinelike'),
        help=
        ("Options for displaying tab completions, 'column', 'multicolumn', and "
         "'readlinelike'. These options are for `prompt_toolkit`, see "
         "`prompt_toolkit` documentation for more information."),
        default_value='multicolumn').tag(config=True)

    highlight_matching_brackets = Bool(
        True,
        help="Highlight matching brackets.",
    ).tag(config=True)

    extra_open_editor_shortcuts = Bool(
        False,
        help=
        "Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. "
        "This is in addition to the F2 binding, which is always enabled.").tag(
            config=True)

    handle_return = Any(
        None,
        help="Provide an alternative handler to be called when the user presses "
        "Return. This is an advanced option intended for debugging, which "
        "may be changed or removed in later releases.").tag(config=True)

    enable_history_search = Bool(
        True,
        help="Enable or disable the prompt_toolkit history search.").tag(
            config=True)

    prompt_includes_vi_mode = Bool(
        True,
        help="Display the current vi mode (when using vi editing mode).").tag(
            config=True)

    @observe('term_title')
    def init_term_title(self, change=None):
        # Enable or disable the terminal title.
        if self.term_title:
            toggle_set_term_title(True)
            set_term_title(self.term_title_format.format(cwd=abbrev_cwd()))
        else:
            toggle_set_term_title(False)

    def restore_term_title(self):
        if self.term_title:
            restore_term_title()

    def init_display_formatter(self):
        super(TerminalInteractiveShell, self).init_display_formatter()
        # terminal only supports plain text
        self.display_formatter.active_types = ['text/plain']
        # disable `_ipython_display_`
        self.display_formatter.ipython_display_formatter.enabled = False

    def init_prompt_toolkit_cli(self):
        if self.simple_prompt:
            # Fall back to plain non-interactive output for tests.
            # This is very limited.
            def prompt():
                prompt_text = "".join(x[1]
                                      for x in self.prompts.in_prompt_tokens())
                lines = [input(prompt_text)]
                prompt_continuation = "".join(
                    x[1] for x in self.prompts.continuation_prompt_tokens())
                while self.check_complete('\n'.join(lines))[0] == 'incomplete':
                    lines.append(input(prompt_continuation))
                return '\n'.join(lines)

            self.prompt_for_code = prompt
            return

        # Set up keyboard shortcuts
        key_bindings = create_ipython_shortcuts(self)

        # Pre-populate history from IPython's history database
        history = InMemoryHistory()
        last_cell = u""
        for __, ___, cell in self.history_manager.get_tail(
                self.history_load_length, include_latest=True):
            # Ignore blank lines and consecutive duplicates
            cell = cell.rstrip()
            if cell and (cell != last_cell):
                history.append_string(cell)
                last_cell = cell

        self._style = self._make_style_from_name_or_cls(
            self.highlighting_style)
        self.style = DynamicStyle(lambda: self._style)

        editing_mode = getattr(EditingMode, self.editing_mode.upper())

        self.pt_loop = asyncio.new_event_loop()
        self.pt_app = PromptSession(
            auto_suggest=AutoSuggestFromHistory(),
            editing_mode=editing_mode,
            key_bindings=key_bindings,
            history=history,
            completer=IPythonPTCompleter(shell=self),
            enable_history_search=self.enable_history_search,
            style=self.style,
            include_default_pygments_style=False,
            mouse_support=self.mouse_support,
            enable_open_in_editor=self.extra_open_editor_shortcuts,
            color_depth=self.color_depth,
            tempfile_suffix=".py",
            **self._extra_prompt_options())

    def _make_style_from_name_or_cls(self, name_or_cls):
        """
        Small wrapper that make an IPython compatible style from a style name

        We need that to add style for prompt ... etc.
        """
        style_overrides = {}
        if name_or_cls == 'legacy':
            legacy = self.colors.lower()
            if legacy == 'linux':
                style_cls = get_style_by_name('monokai')
                style_overrides = _style_overrides_linux
            elif legacy == 'lightbg':
                style_overrides = _style_overrides_light_bg
                style_cls = get_style_by_name('pastie')
            elif legacy == 'neutral':
                # The default theme needs to be visible on both a dark background
                # and a light background, because we can't tell what the terminal
                # looks like. These tweaks to the default theme help with that.
                style_cls = get_style_by_name('default')
                style_overrides.update({
                    Token.Number: '#ansigreen',
                    Token.Operator: 'noinherit',
                    Token.String: '#ansiyellow',
                    Token.Name.Function: '#ansiblue',
                    Token.Name.Class: 'bold #ansiblue',
                    Token.Name.Namespace: 'bold #ansiblue',
                    Token.Name.Variable.Magic: '#ansiblue',
                    Token.Prompt: '#ansigreen',
                    Token.PromptNum: '#ansibrightgreen bold',
                    Token.OutPrompt: '#ansired',
                    Token.OutPromptNum: '#ansibrightred bold',
                })

                # Hack: Due to limited color support on the Windows console
                # the prompt colors will be wrong without this
                if os.name == 'nt':
                    style_overrides.update({
                        Token.Prompt: '#ansidarkgreen',
                        Token.PromptNum: '#ansigreen bold',
                        Token.OutPrompt: '#ansidarkred',
                        Token.OutPromptNum: '#ansired bold',
                    })
            elif legacy == 'nocolor':
                style_cls = _NoStyle
                style_overrides = {}
            else:
                raise ValueError('Got unknown colors: %r' % legacy)
        else:
            if isinstance(name_or_cls, str):
                style_cls = get_style_by_name(name_or_cls)
            else:
                style_cls = name_or_cls
            style_overrides = {
                Token.Prompt: '#ansigreen',
                Token.PromptNum: '#ansibrightgreen bold',
                Token.OutPrompt: '#ansired',
                Token.OutPromptNum: '#ansibrightred bold',
            }
        style_overrides.update(self.highlighting_style_overrides)
        style = merge_styles([
            style_from_pygments_cls(style_cls),
            style_from_pygments_dict(style_overrides),
        ])

        return style

    @property
    def pt_complete_style(self):
        return {
            'multicolumn': CompleteStyle.MULTI_COLUMN,
            'column': CompleteStyle.COLUMN,
            'readlinelike': CompleteStyle.READLINE_LIKE,
        }[self.display_completions]

    @property
    def color_depth(self):
        return (ColorDepth.TRUE_COLOR if self.true_color else None)

    def _extra_prompt_options(self):
        """
        Return the current layout option for the current Terminal InteractiveShell
        """
        def get_message():
            return PygmentsTokens(self.prompts.in_prompt_tokens())

        if self.editing_mode == 'emacs':
            # In emacs mode the prompt is (usually) static, so we call the
            # function only once; in vi mode it can toggle between [ins] and
            # [nor], so we cannot precompute it.
            # Favor the default keybinding, which almost everybody uses, to
            # decrease CPU usage. If users with custom Prompts hit issues,
            # we can look for a workaround then.
            get_message = get_message()

        options = {
            'complete_in_thread': False,
            'lexer': IPythonPTLexer(),
            'reserve_space_for_menu': self.space_for_menu,
            'message': get_message,
            'prompt_continuation':
            (lambda width, lineno, is_soft_wrap: PygmentsTokens(
                self.prompts.continuation_prompt_tokens(width))),
            'multiline': True,
            'complete_style': self.pt_complete_style,

            # Highlight matching brackets, but only when this setting is
            # enabled, and only when the DEFAULT_BUFFER has the focus.
            'input_processors': [
                ConditionalProcessor(
                    processor=HighlightMatchingBracketProcessor(
                        chars='[](){}'),
                    filter=HasFocus(DEFAULT_BUFFER) & ~IsDone()
                    & Condition(lambda: self.highlight_matching_brackets))
            ],
        }
        if not PTK3:
            options['inputhook'] = self.inputhook

        return options

    def prompt_for_code(self):
        if self.rl_next_input:
            default = self.rl_next_input
            self.rl_next_input = None
        else:
            default = ''

        # In order to make sure that asyncio code written in the
        # interactive shell doesn't interfere with the prompt, we run the
        # prompt in a different event loop.
        # If we don't do this, people could spawn coroutine with a
        # while/true inside which will freeze the prompt.

        try:
            old_loop = asyncio.get_event_loop()
        except RuntimeError:
            # This happens when the user used `asyncio.run()`.
            old_loop = None

        asyncio.set_event_loop(self.pt_loop)
        try:
            with patch_stdout(raw=True):
                text = self.pt_app.prompt(default=default,
                                          **self._extra_prompt_options())
        finally:
            # Restore the original event loop.
            asyncio.set_event_loop(old_loop)

        return text

    def enable_win_unicode_console(self):
        # Since IPython 7.10 doesn't support Python < 3.6, and with PEP 528
        # Python uses the Unicode APIs for the Windows console by default,
        # win_unicode_console (WUC) shouldn't be needed.
        from warnings import warn
        warn(
            "`enable_win_unicode_console` is deprecated since IPython 7.10, does not do anything and will be removed in the future",
            DeprecationWarning,
            stacklevel=2)

    def init_io(self):
        if sys.platform not in {'win32', 'cli'}:
            return

        import colorama
        colorama.init()

        # For some reason we make these wrappers around stdout/stderr.
        # For now, we need to reset them so all output gets coloured.
        # https://github.com/ipython/ipython/issues/8669
        # io.std* are deprecated, but don't show our own deprecation warnings
        # during initialization of the deprecated API.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DeprecationWarning)
            io.stdout = io.IOStream(sys.stdout)
            io.stderr = io.IOStream(sys.stderr)

    def init_magics(self):
        super(TerminalInteractiveShell, self).init_magics()
        self.register_magics(TerminalMagics)

    def init_alias(self):
        # The parent class defines aliases that can be safely used with any
        # frontend.
        super(TerminalInteractiveShell, self).init_alias()

        # Now define aliases that only make sense on the terminal, because they
        # need direct access to the console in a way that we can't emulate in
        # GUI or web frontend
        if os.name == 'posix':
            for cmd in ('clear', 'more', 'less', 'man'):
                self.alias_manager.soft_define_alias(cmd, cmd)

    def __init__(self, *args, **kwargs):
        super(TerminalInteractiveShell, self).__init__(*args, **kwargs)
        self.init_prompt_toolkit_cli()
        self.init_term_title()
        self.keep_running = True

    def ask_exit(self):
        self.keep_running = False

    rl_next_input = None

    def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED):

        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn(
                'interact `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.',
                DeprecationWarning,
                stacklevel=2)

        self.keep_running = True
        while self.keep_running:
            print(self.separate_in, end='')

            try:
                code = self.prompt_for_code()
            except EOFError:
                if (not self.confirm_exit) \
                        or self.ask_yes_no('Do you really want to exit ([y]/n)?','y','n'):
                    self.ask_exit()

            else:
                if code:
                    self.run_cell(code, store_history=True)

    def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED):
        # An extra layer of protection in case someone mashing Ctrl-C breaks
        # out of our internal code.
        if display_banner is not DISPLAY_BANNER_DEPRECATED:
            warn(
                'mainloop `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.',
                DeprecationWarning,
                stacklevel=2)
        while True:
            try:
                self.interact()
                break
            except KeyboardInterrupt as e:
                print("\n%s escaped interact()\n" % type(e).__name__)
            finally:
                # An interrupt during the eventloop will mess up the
                # internal state of the prompt_toolkit library.
                # Stopping the eventloop fixes this, see
                # https://github.com/ipython/ipython/pull/9867
                if hasattr(self, '_eventloop'):
                    self._eventloop.stop()

                self.restore_term_title()

        # Try to call some at-exit operations optimistically, as some things
        # can't be done during interpreter shutdown. This is technically
        # inaccurate, as it makes mainloop non-re-callable, but that should
        # be a rare, if not nonexistent, use case.

        self._atexit_once()

    _inputhook = None

    def inputhook(self, context):
        if self._inputhook is not None:
            self._inputhook(context)

    active_eventloop = None

    def enable_gui(self, gui=None):
        if gui and (gui != 'inline'):
            self.active_eventloop, self._inputhook =\
                get_inputhook_name_and_func(gui)
        else:
            self.active_eventloop = self._inputhook = None

        # For prompt_toolkit 3.0. We have to create an asyncio event loop with
        # this inputhook.
        if PTK3:
            import asyncio
            from prompt_toolkit.eventloop import new_eventloop_with_inputhook

            if gui == 'asyncio':
                # When we integrate the asyncio event loop, run the UI in the
                # same event loop as the rest of the code. don't use an actual
                # input hook. (Asyncio is not made for nesting event loops.)
                self.pt_loop = asyncio.get_event_loop()

            elif self._inputhook:
                # If an inputhook was set, create a new asyncio event loop with
                # this inputhook for the prompt.
                self.pt_loop = new_eventloop_with_inputhook(self._inputhook)
            else:
                # When there's no inputhook, run the prompt in a separate
                # asyncio event loop.
                self.pt_loop = asyncio.new_event_loop()

    # Run !system commands directly, not through pipes, so terminal programs
    # work correctly.
    system = InteractiveShell.system_raw

    def auto_rewrite_input(self, cmd):
        """Overridden from the parent class to use fancy rewriting prompt"""
        if not self.show_rewritten_input:
            return

        tokens = self.prompts.rewrite_prompt_tokens()
        if self.pt_app:
            print_formatted_text(PygmentsTokens(tokens),
                                 end='',
                                 style=self.pt_app.app.style)
            print(cmd)
        else:
            prompt = ''.join(s for t, s in tokens)
            print(prompt, cmd, sep='')

    _prompts_before = None

    def switch_doctest_mode(self, mode):
        """Switch prompts to classic for %doctest_mode"""
        if mode:
            self._prompts_before = self.prompts
            self.prompts = ClassicPrompts(self)
        elif self._prompts_before:
            self.prompts = self._prompts_before
            self._prompts_before = None
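
The event-loop handling in `prompt_for_code` above is the key trick: the prompt runs on a private asyncio loop, so coroutines that user code schedules on the default loop cannot freeze the REPL. The same pattern in isolation (a sketch, not IPython's actual code):

    import asyncio
    from prompt_toolkit import PromptSession

    pt_loop = asyncio.new_event_loop()  # private loop, used only for prompting
    session = PromptSession()

    def prompt_once():
        try:
            old_loop = asyncio.get_event_loop()
        except RuntimeError:
            old_loop = None  # e.g. the user called asyncio.run() earlier
        asyncio.set_event_loop(pt_loop)  # run the prompt on the private loop
        try:
            return session.prompt('>>> ')
        finally:
            asyncio.set_event_loop(old_loop)  # restore the user's loop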
Example No. 8
class Polyline(GMapsWidgetMixin, widgets.Widget):
    """
    Widget representing a linear overlay of connected line segments on the map

    Add this polyline to a map via the :func:`gmaps.drawing_layer`
    function, or by passing it directly to the ``.features`` array
    of a :class:`gmaps.Drawing` instance.

    :Examples:

    >>> fig = gmaps.figure()
    >>> drawing = gmaps.drawing_layer(features=[
         gmaps.Polyline(
            [(46.72, 6.06), (46.48, 6.49), (46.79, 6.91)],
            stroke_color='blue'
        )
    ])
    >>> fig.add_layer(drawing)

    You can also add a polyline to an existing :class:`gmaps.Drawing`
    instance:

    >>> fig = gmaps.figure()
    >>> drawing = gmaps.drawing_layer()
    >>> fig.add_layer(drawing)
    >>> fig # display the figure

    You can now add polylines directly on the map:

    >>> drawing.features = [
         gmaps.Polyline(
             [(46.72, 6.06), (46.48, 6.49), (46.79, 6.91)],
             stroke_color='blue'
         )
    ]

    :param path:
        List of (latitude, longitude) pairs denoting each point on the polyline.
        Latitudes are expressed as a float between -90 (corresponding to 90
        degrees south) and +90 (corresponding to 90 degrees north). Longitudes
        are expressed as a float between -180 (corresponding to 180 degrees
        west) and +180 (corresponding to 180 degrees east).
    :type path: list of tuples of floats

    {stroke_options_params}

    {fill_options_params}
    """
    _view_name = Unicode('PolylineView').tag(sync=True)
    _model_name = Unicode('PolylineModel').tag(sync=True)
    path = List(geotraitlets.Point(), minlen=3).tag(sync=True)
    stroke_color = geotraitlets.ColorAlpha(
        allow_none=False, default_value=DEFAULT_STROKE_COLOR).tag(sync=True)
    stroke_weight = Float(min=0.0, allow_none=False,
                          default_value=2.0).tag(sync=True)
    stroke_opacity = geotraitlets.StrokeOpacity().tag(sync=True)

    def __init__(self,
                 path,
                 stroke_color=DEFAULT_STROKE_COLOR,
                 stroke_weight=2.0,
                 stroke_opacity=geotraitlets.StrokeOpacity.default_value):
        kwargs = dict(
            path=path,
            stroke_color=stroke_color,
            stroke_weight=stroke_weight,
            stroke_opacity=stroke_opacity,
        )
        super(Polyline, self).__init__(**kwargs)
Example No. 9
class Circle(GMapsWidgetMixin, widgets.Widget):
    """
    Widget representing a closed circle on a map

    Add this circle to a map via the :func:`gmaps.drawing_layer`
    function, or by passing it directly to the ``.features`` array
    of a :class:`gmaps.Drawing` instance.

    :Examples:

    >>> fig = gmaps.figure()
    >>> drawing = gmaps.drawing_layer(features=[
         gmaps.Circle(
            radius=20000,  # in meters
            center=(46.656, 6.111),
            stroke_color='red', fill_color=(255, 0, 132)
        )
    ])
    >>> fig.add_layer(drawing)

    You can also add circles to an existing :class:`gmaps.Drawing`
    instance:

    >>> fig = gmaps.figure()
    >>> drawing = gmaps.drawing_layer()
    >>> fig.add_layer(drawing)
    >>> fig # display the figure

    You can now add circles directly on the map:

    >>> drawing.features = [
         gmaps.Circle(
            radius=20000,  # in meters
            center=(46.656, 6.111),
            stroke_color='red', fill_color=(255, 0, 132)
        )
    ]

    :param center:
        (latitude, longitude) pair denoting the center of the
        circle. Latitudes are expressed as a float between -90 (
        corresponding to 90 degrees south) and +90 (corresponding to
        90 degrees north). Longitudes are expressed as a float between
        -180 (corresponding to 180 degrees west) and +180
        (corresponding to 180 degrees east).
    :type center: pair of floats

    :param radius:
        Radius of the circle, in meters.
    :type radius: float

    {stroke_options_params}

    {fill_options_params}
    """
    _view_name = Unicode('CircleView').tag(sync=True)
    _model_name = Unicode('CircleModel').tag(sync=True)
    radius = Float(min=0.0).tag(sync=True)
    center = geotraitlets.Point().tag(sync=True)
    stroke_color = geotraitlets.ColorAlpha(
        allow_none=False, default_value=DEFAULT_STROKE_COLOR).tag(sync=True)
    stroke_weight = Float(min=0.0, allow_none=False,
                          default_value=2.0).tag(sync=True)
    stroke_opacity = geotraitlets.StrokeOpacity().tag(sync=True)
    fill_color = geotraitlets.ColorAlpha(
        allow_none=False, default_value=DEFAULT_FILL_COLOR).tag(sync=True)
    fill_opacity = geotraitlets.FillOpacity().tag(sync=True)

    def __init__(self,
                 center,
                 radius,
                 stroke_color=DEFAULT_STROKE_COLOR,
                 stroke_weight=2.0,
                 stroke_opacity=geotraitlets.StrokeOpacity.default_value,
                 fill_color=DEFAULT_FILL_COLOR,
                 fill_opacity=geotraitlets.FillOpacity.default_value):
        kwargs = dict(center=center,
                      radius=radius,
                      stroke_color=stroke_color,
                      stroke_weight=stroke_weight,
                      stroke_opacity=stroke_opacity,
                      fill_color=fill_color,
                      fill_opacity=fill_opacity)
        super(Circle, self).__init__(**kwargs)
Example No. 10
class IPEngineApp(BaseParallelApplication):

    name = 'ipengine'
    description = _description
    examples = _examples
    classes = List([ZMQInteractiveShell, ProfileDir, Session, EngineFactory, Kernel, MPI])

    startup_script = Unicode(u'', config=True,
        help='specify a script to be run at startup')
    startup_command = Unicode('', config=True,
            help='specify a command to be run at startup')

    url_file = Unicode(u'', config=True,
        help="""The full location of the file containing the connection information for
        the controller. If this is not given, the file must be in the
        security directory of the cluster directory.  This location is
        resolved using the `profile` or `profile_dir` options.""",
        )
    wait_for_url_file = Float(5, config=True,
        help="""The maximum number of seconds to wait for url_file to exist.
        This is useful for batch-systems and shared-filesystems where the
        controller and engine are started at the same time and it
        may take a moment for the controller to write the connector files.""")

    url_file_name = Unicode(u'ipcontroller-engine.json', config=True)

    def _cluster_id_changed(self, name, old, new):
        if new:
            base = 'ipcontroller-%s' % new
        else:
            base = 'ipcontroller'
        self.url_file_name = "%s-engine.json" % base

    log_url = Unicode('', config=True,
        help="""The URL for the iploggerapp instance, for forwarding
        logging to a central location.""")
    
    # an IPKernelApp instance, used to setup listening for shell frontends
    kernel_app = Instance(IPKernelApp, allow_none=True)

    aliases = Dict(aliases)
    flags = Dict(flags)
    
    @property
    def kernel(self):
        """allow access to the Kernel object, so I look like IPKernelApp"""
        return self.engine.kernel

    def find_url_file(self):
        """Set the url file.

        Here we don't try to actually see if it exists or is valid, as that
        is handled by the connection logic.
        """
        # Find the actual controller key file
        if not self.url_file:
            self.url_file = os.path.join(
                self.profile_dir.security_dir,
                self.url_file_name
            )
    
    def load_connector_file(self):
        """load config from a JSON connector file,
        at a *lower* priority than command-line/config files.
        """
        
        self.log.info("Loading url_file %r", self.url_file)
        config = self.config
        
        with open(self.url_file) as f:
            num_tries = 0
            max_tries = 5
            d = ""
            while not d:
                try:
                    # rewind, in case an earlier read saw a partial file
                    f.seek(0)
                    d = json.loads(f.read())
                except ValueError:
                    if num_tries > max_tries:
                        raise
                    num_tries += 1
                    time.sleep(0.5)
        
        # allow hand-override of location for disambiguation
        # and ssh-server
        if 'EngineFactory.location' not in config:
            config.EngineFactory.location = d['location']
        if 'EngineFactory.sshserver' not in config:
            config.EngineFactory.sshserver = d.get('ssh')
        
        location = config.EngineFactory.location
        
        proto, ip = d['interface'].split('://')
        ip = disambiguate_ip_address(ip, location)
        d['interface'] = '%s://%s' % (proto, ip)
        
        # DO NOT allow override of basic URLs, serialization, or key
        # JSON file takes top priority there
        config.Session.key = cast_bytes(d['key'])
        config.Session.signature_scheme = d['signature_scheme']
        
        config.EngineFactory.url = d['interface'] + ':%i' % d['registration']
        
        config.Session.packer = d['pack']
        config.Session.unpacker = d['unpack']
        
        self.log.debug("Config changed:")
        self.log.debug("%r", config)
        self.connection_info = d
    
    def bind_kernel(self, **kwargs):
        """Promote engine to listening kernel, accessible to frontends."""
        if self.kernel_app is not None:
            return
        
        self.log.info("Opening ports for direct connections as an IPython kernel")
        
        kernel = self.kernel
        
        kwargs.setdefault('config', self.config)
        kwargs.setdefault('log', self.log)
        kwargs.setdefault('profile_dir', self.profile_dir)
        kwargs.setdefault('session', self.engine.session)
        
        app = self.kernel_app = IPKernelApp(**kwargs)
        
        # allow IPKernelApp.instance():
        IPKernelApp._instance = app
        
        app.init_connection_file()
        # relevant contents of init_sockets:
        
        app.shell_port = app._bind_socket(kernel.shell_streams[0], app.shell_port)
        app.log.debug("shell ROUTER Channel on port: %i", app.shell_port)
        
        iopub_socket = kernel.iopub_socket
        # ipykernel 4.3 iopub_socket is an IOThread wrapper:
        if hasattr(iopub_socket, 'socket'):
            iopub_socket = iopub_socket.socket
        
        app.iopub_port = app._bind_socket(iopub_socket, app.iopub_port)
        app.log.debug("iopub PUB Channel on port: %i", app.iopub_port)
        
        kernel.stdin_socket = self.engine.context.socket(zmq.ROUTER)
        app.stdin_port = app._bind_socket(kernel.stdin_socket, app.stdin_port)
        app.log.debug("stdin ROUTER Channel on port: %i", app.stdin_port)
        
        # start the heartbeat, and log connection info:
        
        app.init_heartbeat()
        
        app.log_connection_info()
        app.connection_dir = self.profile_dir.security_dir
        app.write_connection_file()
        
    
    def init_engine(self):
        # This is the working dir by now.
        sys.path.insert(0, '')
        config = self.config
        # print config
        self.find_url_file()
        
        # was the url manually specified?
        keys = set(self.config.EngineFactory.keys())
        keys = keys.union(set(self.config.RegistrationFactory.keys()))
        
        if self.wait_for_url_file and not os.path.exists(self.url_file):
            self.log.warn("url_file %r not found", self.url_file)
            self.log.warn("Waiting up to %.1f seconds for it to arrive.", self.wait_for_url_file)
            tic = time.time()
            while not os.path.exists(self.url_file) and (time.time()-tic < self.wait_for_url_file):
                # wait for url_file to exist, or until time limit
                time.sleep(0.1)
            
        if os.path.exists(self.url_file):
            self.load_connector_file()
        else:
            self.log.fatal("Fatal: url file never arrived: %s", self.url_file)
            self.exit(1)
        
        exec_lines = []
        for app in ('IPKernelApp', 'InteractiveShellApp'):
            if '%s.exec_lines' % app in config:
                exec_lines = config[app].exec_lines
                break
        
        exec_files = []
        for app in ('IPKernelApp', 'InteractiveShellApp'):
            if '%s.exec_files' % app in config:
                exec_files = config[app].exec_files
                break
        
        config.IPKernelApp.exec_lines = exec_lines
        config.IPKernelApp.exec_files = exec_files
        
        if self.startup_script:
            exec_files.append(self.startup_script)
        if self.startup_command:
            exec_lines.append(self.startup_command)

        # Create the underlying shell class and Engine
        # shell_class = import_item(self.master_config.Global.shell_class)
        # print self.config
        try:
            self.engine = EngineFactory(
                config=config,
                log=self.log,
                connection_info=self.connection_info,
            )
        except Exception:
            self.log.error("Couldn't start the Engine", exc_info=True)
            self.exit(1)
    
    def forward_logging(self):
        if self.log_url:
            self.log.info("Forwarding logging to %s", self.log_url)
            context = self.engine.context
            lsock = context.socket(zmq.PUB)
            lsock.connect(self.log_url)
            handler = EnginePUBHandler(self.engine, lsock)
            handler.setLevel(self.log_level)
            self.log.addHandler(handler)
    
    def init_mpi(self):
        global mpi
        self.mpi = MPI(parent=self)

        mpi_import_statement = self.mpi.init_script
        if mpi_import_statement:
            try:
                self.log.info("Initializing MPI:")
                self.log.info(mpi_import_statement)
                exec(mpi_import_statement, globals())
            except Exception:
                mpi = None
        else:
            mpi = None

    @catch_config_error
    def initialize(self, argv=None):
        super(IPEngineApp, self).initialize(argv)
        self.init_mpi()
        self.init_engine()
        self.forward_logging()
    
    def start(self):
        self.engine.start()
        try:
            self.engine.loop.start()
        except KeyboardInterrupt:
            self.log.critical("Engine Interrupted, shutting down...\n")
Example No. 11
class Line(GMapsWidgetMixin, widgets.Widget):
    """
    Widget representing a single line on a map

    Add this line to a map via the :func:`gmaps.drawing_layer` function, or by
    passing it directly to the ``.features`` array of an existing instance of
    :class:`gmaps.Drawing`.

    :Examples:

    >>> fig = gmaps.figure()
    >>> drawing = gmaps.drawing_layer(features=[
         gmaps.Line((46.44, 5.24), (46.23, 5.86), stroke_color='green'),
         gmaps.Line((48.44, 1.32), (47.13, 3.91), stroke_weight=5.0)
    ])
    >>> fig.add_layer(drawing)

    You can also add a line to an existing :class:`gmaps.Drawing`
    instance:

    >>> fig = gmaps.figure()
    >>> drawing = gmaps.drawing_layer()
    >>> fig.add_layer(drawing)
    >>> fig # display the figure

    You can now add lines directly on the map:

    >>> drawing.features = [
         gmaps.Line((46.44, 5.24), (46.23, 5.86), stroke_color='green'),
         gmaps.Line((48.44, 1.32), (47.13, 3.91), stroke_weight=5.0)
    ]

    :param start:
        (latitude, longitude) pair denoting the start of the line. Latitudes
        are expressed as a float between -90 (corresponding to 90 degrees
        south) and +90 (corresponding to 90 degrees north). Longitudes are
        expressed as a float between -180 (corresponding to 180 degrees west)
        and +180 (corresponding to 180 degrees east).
    :type start: tuple of floats

    :param end:
        (latitude, longitude) pair denoting the end of the line. Latitudes
        are expressed as a float between -90 (corresponding to 90 degrees
        south) and +90 (corresponding to 90 degrees north). Longitudes are
        expressed as a float between -180 (corresponding to 180 degrees west)
        and +180 (corresponding to 180 degrees east).
    :type end: tuple of floats

    {stroke_options_params}
    """
    _view_name = Unicode('LineView').tag(sync=True)
    _model_name = Unicode('LineModel').tag(sync=True)
    start = geotraitlets.Point().tag(sync=True)
    end = geotraitlets.Point().tag(sync=True)
    stroke_color = geotraitlets.ColorAlpha(
        allow_none=False, default_value=DEFAULT_STROKE_COLOR).tag(sync=True)
    stroke_weight = Float(min=0.0, allow_none=False,
                          default_value=2.0).tag(sync=True)
    stroke_opacity = geotraitlets.StrokeOpacity().tag(sync=True)

    def __init__(self,
                 start,
                 end,
                 stroke_color=DEFAULT_STROKE_COLOR,
                 stroke_weight=2.0,
                 stroke_opacity=geotraitlets.StrokeOpacity.default_value):
        kwargs = dict(start=start,
                      end=end,
                      stroke_color=stroke_color,
                      stroke_weight=stroke_weight,
                      stroke_opacity=stroke_opacity)
        super(Line, self).__init__(**kwargs)
Example No. 12
class JupyterHub(Application):
    """An Application for starting a Multi-User Jupyter Notebook server."""
    name = 'jupyterhub'
    version = jupyterhub.__version__

    description = """Start a multi-user Jupyter Notebook server
    
    Spawns a configurable-http-proxy and multi-user Hub,
    which authenticates users and spawns single-user Notebook servers
    on behalf of users.
    """

    examples = """
    
    generate default config file:
    
        jupyterhub --generate-config -f /etc/jupyterhub/jupyterhub.py
    
    spawn the server on 10.0.1.2:443 with https:
    
        jupyterhub --ip 10.0.1.2 --port 443 --ssl-key my_ssl.key --ssl-cert my_ssl.cert
    """

    aliases = Dict(aliases)
    flags = Dict(flags)

    subcommands = {'token': (NewToken, "Generate an API token for a user")}

    classes = List([
        Spawner,
        LocalProcessSpawner,
        Authenticator,
        PAMAuthenticator,
    ])

    config_file = Unicode(
        'jupyterhub_config.py',
        config=True,
        help="The config file to load",
    )
    generate_config = Bool(
        False,
        config=True,
        help="Generate default config file",
    )
    answer_yes = Bool(
        False,
        config=True,
        help="Answer yes to any questions (e.g. confirm overwrite)")
    pid_file = Unicode('',
                       config=True,
                       help="""File to write PID
        Useful for daemonizing jupyterhub.
        """)
    cookie_max_age_days = Float(
        14,
        config=True,
        help="""Number of days for a login cookie to be valid.
        Default is two weeks.
        """)
    last_activity_interval = Integer(
        300,
        config=True,
        help=
        "Interval (in seconds) at which to update last-activity timestamps.")
    proxy_check_interval = Integer(
        30,
        config=True,
        help="Interval (in seconds) at which to check if the proxy is running."
    )

    data_files_path = Unicode(
        DATA_FILES_PATH,
        config=True,
        help=
        "The location of jupyterhub data files (e.g. /usr/local/share/jupyter/hub)"
    )

    template_paths = List(
        config=True,
        help="Paths to search for jinja templates.",
    )

    def _template_paths_default(self):
        return [os.path.join(self.data_files_path, 'templates')]

    confirm_no_ssl = Bool(
        False,
        config=True,
        help="""Confirm that JupyterHub should be run without SSL.
        This is **NOT RECOMMENDED** unless SSL termination is being handled by another layer.
        """)
    ssl_key = Unicode(
        '',
        config=True,
        help="""Path to SSL key file for the public facing interface of the proxy
        
        Use with ssl_cert
        """)
    ssl_cert = Unicode(
        '',
        config=True,
        help=
        """Path to SSL certificate file for the public facing interface of the proxy
        
        Use with ssl_key
        """)
    ip = Unicode(
        '',
        config=True,
        help="The public facing ip of the whole application (the proxy)")

    subdomain_host = Unicode(
        '',
        config=True,
        help="""Run single-user servers on subdomains of this host.

        This should be the full https://hub.domain.tld[:port]

        Provides additional cross-site protections for javascript served by single-user servers.

        Requires <username>.hub.domain.tld to resolve to the same host as hub.domain.tld.

        In general, this is most easily achieved with wildcard DNS.

        When using SSL (i.e. always) this also requires a wildcard SSL certificate.
        """)

    def _subdomain_host_changed(self, name, old, new):
        if new and '://' not in new:
            # host should include '://'
            # if not specified, assume https: You have to be really explicit about HTTP!
            self.subdomain_host = 'https://' + new
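        # e.g. setting subdomain_host = 'hub.example.com' is stored as
        # 'https://hub.example.com' (illustrative value).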

    port = Integer(8000,
                   config=True,
                   help="The public facing port of the proxy")
    base_url = URLPrefix('/',
                         config=True,
                         help="The base URL of the entire application")
    logo_file = Unicode(
        '',
        config=True,
        help=
        "Specify path to a logo image to override the Jupyter logo in the banner."
    )

    def _logo_file_default(self):
        return os.path.join(self.data_files_path, 'static', 'images',
                            'jupyter.png')

    jinja_environment_options = Dict(
        config=True,
        help="Supply extra arguments that will be passed to Jinja environment."
    )

    proxy_cmd = Command('configurable-http-proxy',
                        config=True,
                        help="""The command to start the http proxy.
        
        Only override if configurable-http-proxy is not on your PATH
        """)
    debug_proxy = Bool(False,
                       config=True,
                       help="show debug output in configurable-http-proxy")
    proxy_auth_token = Unicode(config=True,
                               help="""The Proxy Auth token.

        Loaded from the CONFIGPROXY_AUTH_TOKEN env variable by default.
        """)

    def _proxy_auth_token_default(self):
        token = os.environ.get('CONFIGPROXY_AUTH_TOKEN', None)
        if not token:
            self.log.warn('\n'.join([
                "",
                "Generating CONFIGPROXY_AUTH_TOKEN. Restarting the Hub will require restarting the proxy.",
                "Set CONFIGPROXY_AUTH_TOKEN env or JupyterHub.proxy_auth_token config to avoid this message.",
                "",
            ]))
            token = orm.new_token()
        return token

    proxy_api_ip = Unicode('127.0.0.1',
                           config=True,
                           help="The ip for the proxy API handlers")
    proxy_api_port = Integer(config=True,
                             help="The port for the proxy API handlers")

    def _proxy_api_port_default(self):
        return self.port + 1

    hub_port = Integer(8081, config=True, help="The port for this process")
    hub_ip = Unicode('127.0.0.1', config=True, help="The ip for this process")
    hub_prefix = URLPrefix(
        '/hub/',
        config=True,
        help="The prefix for the hub server. Must not be '/'")

    def _hub_prefix_default(self):
        return url_path_join(self.base_url, '/hub/')

    def _hub_prefix_changed(self, name, old, new):
        if new == '/':
            raise TraitError("'/' is not a valid hub prefix")
        if not new.startswith(self.base_url):
            self.hub_prefix = url_path_join(self.base_url, new)

    cookie_secret = Bytes(config=True,
                          env='JPY_COOKIE_SECRET',
                          help="""The cookie secret to use to encrypt cookies.

        Loaded from the JPY_COOKIE_SECRET env variable by default.
        """)

    cookie_secret_file = Unicode(
        'jupyterhub_cookie_secret',
        config=True,
        help="""File in which to store the cookie secret.""")

    authenticator_class = Type(PAMAuthenticator,
                               Authenticator,
                               config=True,
                               help="""Class for authenticating users.
        
        This should be a class with the following form:
        
        - constructor takes one kwarg: `config`, the IPython config object.
        
        - is a tornado.gen.coroutine
        - returns username on success, None on failure
        - takes two arguments: (handler, data),
          where `handler` is the calling web.RequestHandler,
          and `data` is the POST form data from the login page.
        """)

    authenticator = Instance(Authenticator)

    def _authenticator_default(self):
        return self.authenticator_class(parent=self, db=self.db)

    # class for spawning single-user servers
    spawner_class = Type(
        LocalProcessSpawner,
        Spawner,
        config=True,
        help="""The class to use for spawning single-user servers.
        
        Should be a subclass of Spawner.
        """)

    db_url = Unicode(
        'sqlite:///jupyterhub.sqlite',
        config=True,
        help="url for the database. e.g. `sqlite:///jupyterhub.sqlite`")

    def _db_url_changed(self, name, old, new):
        if '://' not in new:
            # assume sqlite, if given as a plain filename
            self.db_url = 'sqlite:///%s' % new

    db_kwargs = Dict(
        config=True,
        help="""Include any kwargs to pass to the database connection.
        See sqlalchemy.create_engine for details.
        """)

    reset_db = Bool(False, config=True, help="Purge and reset the database.")
    debug_db = Bool(
        False,
        config=True,
        help="log all database transactions. This has A LOT of output")
    session_factory = Any()

    users = Instance(UserDict)

    def _users_default(self):
        assert self.tornado_settings
        return UserDict(db_factory=lambda: self.db,
                        settings=self.tornado_settings)

    admin_access = Bool(
        False,
        config=True,
        help="""Grant admin users permission to access single-user servers.
        
        Users should be properly informed if this is enabled.
        """)
    admin_users = Set(
        config=True,
        help="""DEPRECATED, use Authenticator.admin_users instead.""")

    tornado_settings = Dict(config=True)

    cleanup_servers = Bool(
        True,
        config=True,
        help="""Whether to shutdown single-user servers when the Hub shuts down.
        
        Disable if you want to be able to teardown the Hub while leaving the single-user servers running.
        
        If both this and cleanup_proxy are False, sending SIGINT to the Hub will
        only shutdown the Hub, leaving everything else running.
        
        The Hub should be able to resume from database state.
        """)

    cleanup_proxy = Bool(
        True,
        config=True,
        help="""Whether to shutdown the proxy when the Hub shuts down.
        
        Disable if you want to be able to teardown the Hub while leaving the proxy running.
        
        Only valid if the proxy was started by the Hub process.
        
        If both this and cleanup_servers are False, sending SIGINT to the Hub will
        only shutdown the Hub, leaving everything else running.
        
        The Hub should be able to resume from database state.
        """)

    handlers = List()

    _log_formatter_cls = CoroutineLogFormatter
    http_server = None
    proxy_process = None
    io_loop = None

    def _log_level_default(self):
        return logging.INFO

    def _log_datefmt_default(self):
        """Exclude date from default date format"""
        return "%Y-%m-%d %H:%M:%S"

    def _log_format_default(self):
        """override default log format to include time"""
        return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"

    extra_log_file = Unicode("",
                             config=True,
                             help="Set a logging.FileHandler on this file.")
    extra_log_handlers = List(
        Instance(logging.Handler),
        config=True,
        help="Extra log handlers to set on JupyterHub logger",
    )

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger
        # that self.log is a child of. The logging module dispatches log
        # messages to a logger and all of its ancestors until propagate is set
        # to False.
        self.log.propagate = False

        if self.extra_log_file:
            self.extra_log_handlers.append(
                logging.FileHandler(self.extra_log_file))

        _formatter = self._log_formatter_cls(
            fmt=self.log_format,
            datefmt=self.log_datefmt,
        )
        for handler in self.extra_log_handlers:
            if handler.formatter is None:
                handler.setFormatter(_formatter)
            self.log.addHandler(handler)

        # hook up tornado 3's loggers to our app handlers
        for log in (app_log, access_log, gen_log):
            # ensure all log statements identify the application they come from
            log.name = self.log.name
        logger = logging.getLogger('tornado')
        logger.propagate = True
        logger.parent = self.log
        logger.setLevel(self.log.level)

    def init_ports(self):
        if self.hub_port == self.port:
            raise TraitError(
                "The hub and proxy cannot both listen on port %i" % self.port)
        if self.hub_port == self.proxy_api_port:
            raise TraitError(
                "The hub and proxy API cannot both listen on port %i" %
                self.hub_port)
        if self.proxy_api_port == self.port:
            raise TraitError(
                "The proxy's public and API ports cannot both be %i" %
                self.port)

    @staticmethod
    def add_url_prefix(prefix, handlers):
        """add a url prefix to handlers"""
        for i, tup in enumerate(handlers):
            lis = list(tup)
            lis[0] = url_path_join(prefix, tup[0])
            handlers[i] = tuple(lis)
        return handlers
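
    # e.g. add_url_prefix('/hub/', [(r'/login', LoginHandler)])
    # returns [('/hub/login', LoginHandler)] (handler name illustrative)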

    def init_handlers(self):
        h = []
        # load handlers from the authenticator
        h.extend(self.authenticator.get_handlers(self))
        # set default handlers
        h.extend(handlers.default_handlers)
        h.extend(apihandlers.default_handlers)

        h.append((r'/logo', LogoHandler, {'path': self.logo_file}))
        self.handlers = self.add_url_prefix(self.hub_prefix, h)
        # some extra handlers, outside hub_prefix
        self.handlers.extend([
            (r"%s" % self.hub_prefix.rstrip('/'), web.RedirectHandler, {
                "url": self.hub_prefix,
                "permanent": False,
            }),
            (r"(?!%s).*" % self.hub_prefix, handlers.PrefixRedirectHandler),
            (r'(.*)', handlers.Template404),
        ])
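
    # The negative lookahead above means any URL outside hub_prefix falls
    # through to PrefixRedirectHandler; e.g. with hub_prefix='/hub/':
    #
    #     re.match(r"(?!/hub/).*", '/user/foo')   # matches -> redirected
    #     re.match(r"(?!/hub/).*", '/hub/login')  # no match -> hub handler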

    def _check_db_path(self, path):
        """More informative log messages for failed filesystem access"""
        path = os.path.abspath(path)
        parent, fname = os.path.split(path)
        user = getuser()
        if not os.path.isdir(parent):
            self.log.error("Directory %s does not exist", parent)
        if os.path.exists(parent) and not os.access(parent, os.W_OK):
            self.log.error("%s cannot create files in %s", user, parent)
        if os.path.exists(path) and not os.access(path, os.W_OK):
            self.log.error("%s cannot edit %s", user, path)

    def init_secrets(self):
        trait_name = 'cookie_secret'
        trait = self.traits()[trait_name]
        env_name = trait.get_metadata('env')
        secret_file = os.path.abspath(
            os.path.expanduser(self.cookie_secret_file))
        secret = self.cookie_secret
        secret_from = 'config'
        # load priority: 1. config, 2. env, 3. file
        if not secret and os.environ.get(env_name):
            secret_from = 'env'
            self.log.info("Loading %s from env[%s]", trait_name, env_name)
            secret = binascii.a2b_hex(os.environ[env_name])
        if not secret and os.path.exists(secret_file):
            secret_from = 'file'
            perm = os.stat(secret_file).st_mode
            if perm & 0o077:
                self.log.error("Bad permissions on %s", secret_file)
            else:
                self.log.info("Loading %s from %s", trait_name, secret_file)
                with open(secret_file) as f:
                    b64_secret = f.read()
                try:
                    secret = binascii.a2b_base64(b64_secret)
                except Exception as e:
                    self.log.error("%s does not contain b64 key: %s",
                                   secret_file, e)
        if not secret:
            secret_from = 'new'
            self.log.debug("Generating new %s", trait_name)
            secret = os.urandom(SECRET_BYTES)

        if secret_file and secret_from == 'new':
            # if we generated a new secret, store it in the secret_file
            self.log.info("Writing %s to %s", trait_name, secret_file)
            b64_secret = binascii.b2a_base64(secret).decode('ascii')
            with open(secret_file, 'w') as f:
                f.write(b64_secret)
            try:
                os.chmod(secret_file, 0o600)
            except OSError:
                self.log.warn("Failed to set permissions on %s", secret_file)
        # store the loaded trait value
        self.cookie_secret = secret
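
    # Round-trip sketch (illustrative) of the secret_file encoding used in
    # init_secrets:
    #
    #     secret = os.urandom(SECRET_BYTES)
    #     b64_secret = binascii.b2a_base64(secret).decode('ascii')
    #     assert binascii.a2b_base64(b64_secret) == secret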

    # thread-local storage of db objects
    _local = Instance(threading.local, ())

    @property
    def db(self):
        if not hasattr(self._local, 'db'):
            self._local.db = scoped_session(self.session_factory)()
        return self._local.db

    @property
    def hub(self):
        if not getattr(self._local, 'hub', None):
            q = self.db.query(orm.Hub)
            assert q.count() <= 1
            self._local.hub = q.first()
            if self.subdomain_host and self._local.hub:
                self._local.hub.host = self.subdomain_host
        return self._local.hub

    @hub.setter
    def hub(self, hub):
        self._local.hub = hub
        if hub and self.subdomain_host:
            hub.host = self.subdomain_host

    @property
    def proxy(self):
        if not getattr(self._local, 'proxy', None):
            q = self.db.query(orm.Proxy)
            assert q.count() <= 1
            p = self._local.proxy = q.first()
            if p:
                p.auth_token = self.proxy_auth_token
        return self._local.proxy

    @proxy.setter
    def proxy(self, proxy):
        self._local.proxy = proxy

    def init_db(self):
        """Create the database connection"""
        self.log.debug("Connecting to db: %s", self.db_url)
        try:
            self.session_factory = orm.new_session_factory(self.db_url,
                                                           reset=self.reset_db,
                                                           echo=self.debug_db,
                                                           **self.db_kwargs)
            # trigger constructing thread local db property
            _ = self.db
        except OperationalError as e:
            self.log.error("Failed to connect to db: %s", self.db_url)
            self.log.debug("Database error was:", exc_info=True)
            if self.db_url.startswith('sqlite:///'):
                self._check_db_path(self.db_url.split(':///', 1)[1])
            self.exit(1)
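
    # e.g. for db_url='sqlite:///jupyterhub.sqlite', the path checked above is
    #     'sqlite:///jupyterhub.sqlite'.split(':///', 1)[1]
    #     -> 'jupyterhub.sqlite'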

    def init_hub(self):
        """Load the Hub config into the database"""
        self.hub = self.db.query(orm.Hub).first()
        if self.hub is None:
            self.hub = orm.Hub(server=orm.Server(
                ip=self.hub_ip,
                port=self.hub_port,
                base_url=self.hub_prefix,
                cookie_name='jupyter-hub-token',
            ))
            self.db.add(self.hub)
        else:
            server = self.hub.server
            server.ip = self.hub_ip
            server.port = self.hub_port
            server.base_url = self.hub_prefix
        if self.subdomain_host and '://' not in self.subdomain_host:
            # subdomain routing needs the full public origin of the Hub
            raise ValueError(
                "subdomain_host should be the public domain[:port] of the Hub,"
                " including the scheme, e.g. 'https://hub.example.com'.")

        self.db.commit()

    @gen.coroutine
    def init_users(self):
        """Load users into and from the database"""
        db = self.db

        if self.admin_users and not self.authenticator.admin_users:
            self.log.warn("\nJupyterHub.admin_users is deprecated."
                          "\nUse Authenticator.admin_users instead.")
            self.authenticator.admin_users = self.admin_users
        admin_users = [
            self.authenticator.normalize_username(name)
            for name in self.authenticator.admin_users
        ]
        for username in admin_users:
            if not self.authenticator.validate_username(username):
                raise ValueError("username %r is not valid" % username)

        if not admin_users:
            self.log.warning(
                "No admin users, admin interface will be unavailable.")
            self.log.warning(
                "Add any administrative users to `c.Authenticator.admin_users` in config."
            )

        new_users = []

        for name in admin_users:
            # ensure anyone specified as admin in config is admin in db
            user = orm.User.find(db, name)
            if user is None:
                user = orm.User(name=name, admin=True)
                new_users.append(user)
                db.add(user)
            else:
                user.admin = True

        # the admin_users config variable will never be used after this point.
        # only the database values will be referenced.

        whitelist = [
            self.authenticator.normalize_username(name)
            for name in self.authenticator.whitelist
        ]
        for username in whitelist:
            if not self.authenticator.validate_username(username):
                raise ValueError("username %r is not valid" % username)

        if not whitelist:
            self.log.info(
                "Not using whitelist. Any authenticated user will be allowed.")

        # add whitelisted users to the db
        for name in whitelist:
            user = orm.User.find(db, name)
            if user is None:
                user = orm.User(name=name)
                new_users.append(user)
                db.add(user)

        if whitelist:
            # fill the whitelist with any users loaded from the db,
            # so we are consistent in both directions.
            # This lets whitelist be used to set up initial list,
            # but changes to the whitelist can occur in the database,
            # and persist across sessions.
            for user in db.query(orm.User):
                self.authenticator.whitelist.add(user.name)

        # The whitelist set and the users in the db are now the same.
        # From this point on, any user changes should be done simultaneously
        # to the whitelist set and user db, unless the whitelist is empty (all users allowed).

        db.commit()

        for user in new_users:
            yield gen.maybe_future(self.authenticator.add_user(user))
        db.commit()

    @gen.coroutine
    def init_spawners(self):
        db = self.db

        user_summaries = ['']

        def _user_summary(user):
            parts = ['{0: >8}'.format(user.name)]
            if user.admin:
                parts.append('admin')
            if user.server:
                parts.append('running at %s' % user.server)
            return ' '.join(parts)

        @gen.coroutine
        def user_stopped(user):
            status = yield user.spawner.poll()
            self.log.warn(
                "User %s server stopped with exit code: %s",
                user.name,
                status,
            )
            yield self.proxy.delete_user(user)
            yield user.stop()

        for orm_user in db.query(orm.User):
            self.users[orm_user.id] = user = User(orm_user,
                                                  self.tornado_settings)
            if not user.state:
                # without spawner state, server isn't valid
                user.server = None
                user_summaries.append(_user_summary(user))
                continue
            self.log.debug("Loading state for %s from db", user.name)
            spawner = user.spawner
            status = yield spawner.poll()
            if status is None:
                self.log.info("%s still running", user.name)
                spawner.add_poll_callback(user_stopped, user)
                spawner.start_polling()
            else:
                # user not running. This is expected if server is None,
                # but if user.server is defined it means the user's server
                # died while the Hub wasn't running.
                log = self.log.warn if user.server else self.log.debug
                log("%s not running.", user.name)
                user.server = None

            user_summaries.append(_user_summary(user))

        self.log.debug("Loaded users: %s", '\n'.join(user_summaries))
        db.commit()

    def init_proxy(self):
        """Load the Proxy config into the database"""
        self.proxy = self.db.query(orm.Proxy).first()
        if self.proxy is None:
            self.proxy = orm.Proxy(
                public_server=orm.Server(),
                api_server=orm.Server(),
            )
            self.db.add(self.proxy)
            self.db.commit()
        self.proxy.auth_token = self.proxy_auth_token  # not persisted
        self.proxy.log = self.log
        self.proxy.public_server.ip = self.ip
        self.proxy.public_server.port = self.port
        self.proxy.api_server.ip = self.proxy_api_ip
        self.proxy.api_server.port = self.proxy_api_port
        self.proxy.api_server.base_url = '/api/routes/'
        self.db.commit()

    @gen.coroutine
    def start_proxy(self):
        """Actually start the configurable-http-proxy"""
        # check for proxy
        if self.proxy.public_server.is_up() or self.proxy.api_server.is_up():
            # check for *authenticated* access to the proxy (auth token can change)
            try:
                yield self.proxy.get_routes()
            except (HTTPError, OSError, socket.error) as e:
                if isinstance(e, HTTPError) and e.code == 403:
                    msg = "Did CONFIGPROXY_AUTH_TOKEN change?"
                else:
                    msg = "Is something else using %s?" % self.proxy.public_server.bind_url
                self.log.error(
                    "Proxy appears to be running at %s, but I can't access it (%s)\n%s",
                    self.proxy.public_server.bind_url, e, msg)
                self.exit(1)
                return
            else:
                self.log.info("Proxy already running at: %s",
                              self.proxy.public_server.bind_url)
            self.proxy_process = None
            return

        env = os.environ.copy()
        env['CONFIGPROXY_AUTH_TOKEN'] = self.proxy.auth_token
        cmd = self.proxy_cmd + [
            '--ip',
            self.proxy.public_server.ip,
            '--port',
            str(self.proxy.public_server.port),
            '--api-ip',
            self.proxy.api_server.ip,
            '--api-port',
            str(self.proxy.api_server.port),
            '--default-target',
            self.hub.server.host,
        ]
        if self.subdomain_host:
            cmd.append('--host-routing')
        if self.debug_proxy:
            cmd.extend(['--log-level', 'debug'])
        if self.ssl_key:
            cmd.extend(['--ssl-key', self.ssl_key])
        if self.ssl_cert:
            cmd.extend(['--ssl-cert', self.ssl_cert])
        # Require SSL to be used or `--no-ssl` to confirm no SSL on
        if ' --ssl' not in ' '.join(cmd):
            if self.confirm_no_ssl:
                self.log.warning(
                    "Running JupyterHub without SSL."
                    " There better be SSL termination happening somewhere else..."
                )
            else:
                self.log.error(
                    "Refusing to run JuptyterHub without SSL."
                    " If you are terminating SSL in another layer,"
                    " pass --no-ssl to tell JupyterHub to allow the proxy to listen on HTTP."
                )
                self.exit(1)
        self.log.info("Starting proxy @ %s", self.proxy.public_server.bind_url)
        self.log.debug("Proxy cmd: %s", cmd)
        try:
            self.proxy_process = Popen(cmd, env=env)
        except FileNotFoundError as e:
            self.log.error(
                "Failed to find proxy %r\n"
                "The proxy can be installed with `npm install -g configurable-http-proxy`"
                % self.proxy_cmd)
            self.exit(1)

        def _check():
            status = self.proxy_process.poll()
            if status is not None:
                e = RuntimeError("Proxy failed to start with exit code %i" %
                                 status)
                # py2-compatible `raise e from None`
                e.__cause__ = None
                raise e

        for server in (self.proxy.public_server, self.proxy.api_server):
            for i in range(10):
                _check()
                try:
                    yield server.wait_up(1)
                except TimeoutError:
                    continue
                else:
                    break
            yield server.wait_up(1)
        self.log.debug("Proxy started and appears to be up")

    @gen.coroutine
    def check_proxy(self):
        if self.proxy_process.poll() is None:
            return
        self.log.error(
            "Proxy stopped with exit code %r", 'unknown'
            if self.proxy_process is None else self.proxy_process.poll())
        yield self.start_proxy()
        self.log.info("Setting up routes on new proxy")
        yield self.proxy.add_all_users(self.users)
        self.log.info("New proxy back up, and good to go")

    def init_tornado_settings(self):
        """Set up the tornado settings dict."""
        base_url = self.hub.server.base_url
        jinja_options = dict(autoescape=True, )
        jinja_options.update(self.jinja_environment_options)
        jinja_env = Environment(loader=FileSystemLoader(self.template_paths),
                                **jinja_options)

        login_url = self.authenticator.login_url(base_url)
        logout_url = self.authenticator.logout_url(base_url)

        # if running from git, disable caching of require.js
        # otherwise cache based on server start time
        parent = os.path.dirname(os.path.dirname(jupyterhub.__file__))
        if os.path.isdir(os.path.join(parent, '.git')):
            version_hash = ''
        else:
            version_hash = datetime.now().strftime("%Y%m%d%H%M%S")

        subdomain_host = self.subdomain_host
        domain = urlparse(subdomain_host).hostname
        settings = dict(
            log_function=log_request,
            config=self.config,
            log=self.log,
            db=self.db,
            proxy=self.proxy,
            hub=self.hub,
            admin_users=self.authenticator.admin_users,
            admin_access=self.admin_access,
            authenticator=self.authenticator,
            spawner_class=self.spawner_class,
            base_url=self.base_url,
            cookie_secret=self.cookie_secret,
            cookie_max_age_days=self.cookie_max_age_days,
            login_url=login_url,
            logout_url=logout_url,
            static_path=os.path.join(self.data_files_path, 'static'),
            static_url_prefix=url_path_join(self.hub.server.base_url,
                                            'static/'),
            static_handler_class=CacheControlStaticFilesHandler,
            template_path=self.template_paths,
            jinja2_env=jinja_env,
            version_hash=version_hash,
            subdomain_host=subdomain_host,
            domain=domain,
        )
        # allow configured settings to have priority
        settings.update(self.tornado_settings)
        self.tornado_settings = settings
        # constructing users requires access to tornado_settings
        self.tornado_settings['users'] = self.users

    def init_tornado_application(self):
        """Instantiate the tornado Application object"""
        self.tornado_application = web.Application(self.handlers,
                                                   **self.tornado_settings)

    def write_pid_file(self):
        pid = os.getpid()
        if self.pid_file:
            self.log.debug("Writing PID %i to %s", pid, self.pid_file)
            with open(self.pid_file, 'w') as f:
                f.write('%i' % pid)

    @gen.coroutine
    @catch_config_error
    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        if self.generate_config or self.subapp:
            return
        self.load_config_file(self.config_file)
        self.init_logging()
        if 'JupyterHubApp' in self.config:
            self.log.warn(
                "Use JupyterHub in config, not JupyterHubApp. Outdated config:\n%s",
                '\n'.join('JupyterHubApp.{key} = {value!r}'.format(key=key,
                                                                   value=value)
                          for key, value in self.config.JupyterHubApp.items()))
            cfg = self.config.copy()
            cfg.JupyterHub.merge(cfg.JupyterHubApp)
            self.update_config(cfg)
        self.write_pid_file()
        self.init_ports()
        self.init_secrets()
        self.init_db()
        self.init_hub()
        self.init_proxy()
        yield self.init_users()
        self.init_tornado_settings()
        yield self.init_spawners()
        self.init_handlers()
        self.init_tornado_application()

    @gen.coroutine
    def cleanup(self):
        """Shutdown our various subprocesses and cleanup runtime files."""

        futures = []
        if self.cleanup_servers:
            self.log.info("Cleaning up single-user servers...")
            # request (async) process termination
            for uid, user in self.users.items():
                if user.spawner is not None:
                    futures.append(user.stop())
        else:
            self.log.info("Leaving single-user servers running")

        # clean up proxy while single-user servers are shutting down
        if self.cleanup_proxy:
            if self.proxy_process:
                self.log.info("Cleaning up proxy[%i]...",
                              self.proxy_process.pid)
                if self.proxy_process.poll() is None:
                    try:
                        self.proxy_process.terminate()
                    except Exception as e:
                        self.log.error("Failed to terminate proxy process: %s",
                                       e)
            else:
                self.log.info("I didn't start the proxy, I can't clean it up")
        else:
            self.log.info("Leaving proxy running")

        # wait for the stop requests to finish:
        for f in futures:
            try:
                yield f
            except Exception as e:
                self.log.error("Failed to stop user: %s", e)

        self.db.commit()

        if self.pid_file and os.path.exists(self.pid_file):
            self.log.info("Cleaning up PID file %s", self.pid_file)
            os.remove(self.pid_file)

        # finally stop the loop once we are all cleaned up
        self.log.info("...done")

    def write_config_file(self):
        """Write our default config to a .py config file"""
        if os.path.exists(self.config_file) and not self.answer_yes:
            answer = ''

            def ask():
                prompt = "Overwrite %s with default config? [y/N]" % self.config_file
                try:
                    return input(prompt).lower() or 'n'
                except KeyboardInterrupt:
                    print('')  # empty line
                    return 'n'

            answer = ask()
            while not answer.startswith(('y', 'n')):
                print("Please answer 'yes' or 'no'")
                answer = ask()
            if answer.startswith('n'):
                return

        config_text = self.generate_config_file()
        if isinstance(config_text, bytes):
            config_text = config_text.decode('utf8')
        print("Writing default config to: %s" % self.config_file)
        with open(self.config_file, mode='w') as f:
            f.write(config_text)

    @gen.coroutine
    def update_last_activity(self):
        """Update User.last_activity timestamps from the proxy"""
        routes = yield self.proxy.get_routes()
        for prefix, route in routes.items():
            if 'user' not in route:
                # not a user route, ignore it
                continue
            user = orm.User.find(self.db, route['user'])
            if user is None:
                self.log.warn("Found no user for route: %s", route)
                continue
            try:
                dt = datetime.strptime(route['last_activity'], ISO8601_ms)
            except Exception:
                dt = datetime.strptime(route['last_activity'], ISO8601_s)
            user.last_activity = max(user.last_activity, dt)

        self.db.commit()
        yield self.proxy.check_routes(self.users, routes)
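
    # The two timestamp formats tried above (constants assumed from
    # jupyterhub.utils):
    #
    #     ISO8601_ms = '%Y-%m-%dT%H:%M:%S.%fZ'  # e.g. 2016-01-01T12:00:00.123Z
    #     ISO8601_s  = '%Y-%m-%dT%H:%M:%SZ'     # e.g. 2016-01-01T12:00:00Z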

    @gen.coroutine
    def start(self):
        """Start the whole thing"""
        self.io_loop = loop = IOLoop.current()

        if self.subapp:
            self.subapp.start()
            loop.stop()
            return

        if self.generate_config:
            self.write_config_file()
            loop.stop()
            return

        # start the webserver
        self.http_server = tornado.httpserver.HTTPServer(
            self.tornado_application, xheaders=True)
        try:
            self.http_server.listen(self.hub_port, address=self.hub_ip)
        except Exception:
            self.log.error("Failed to bind hub to %s",
                           self.hub.server.bind_url)
            raise
        else:
            self.log.info("Hub API listening on %s", self.hub.server.bind_url)

        # start the proxy
        try:
            yield self.start_proxy()
        except Exception as e:
            self.log.critical("Failed to start proxy", exc_info=True)
            self.exit(1)
            return

        loop.add_callback(self.proxy.add_all_users, self.users)

        if self.proxy_process:
            # only check / restart the proxy if we started it in the first place.
            # this means a restarted Hub cannot restart a Proxy that its
            # predecessor started.
            pc = PeriodicCallback(self.check_proxy,
                                  1e3 * self.proxy_check_interval)
            pc.start()

        if self.last_activity_interval:
            pc = PeriodicCallback(self.update_last_activity,
                                  1e3 * self.last_activity_interval)
            pc.start()

        self.log.info("JupyterHub is now running at %s",
                      self.proxy.public_server.url)
        # register cleanup on both TERM and INT
        atexit.register(self.atexit)
        self.init_signal()

    def init_signal(self):
        signal.signal(signal.SIGTERM, self.sigterm)

    def sigterm(self, signum, frame):
        self.log.critical("Received SIGTERM, shutting down")
        self.io_loop.stop()
        self.atexit()

    _atexit_ran = False

    def atexit(self):
        """atexit callback"""
        if self._atexit_ran:
            return
        self._atexit_ran = True
        # run the cleanup step (in a new loop, because the interrupted one is unclean)
        IOLoop.clear_current()
        loop = IOLoop()
        loop.make_current()
        loop.run_sync(self.cleanup)

    def stop(self):
        if not self.io_loop:
            return
        if self.http_server:
            if self.io_loop._running:
                self.io_loop.add_callback(self.http_server.stop)
            else:
                self.http_server.stop()
        self.io_loop.add_callback(self.io_loop.stop)

    @gen.coroutine
    def launch_instance_async(self, argv=None):
        try:
            yield self.initialize(argv)
            yield self.start()
        except Exception as e:
            self.log.exception("")
            self.exit(1)

    @classmethod
    def launch_instance(cls, argv=None):
        self = cls.instance()
        loop = IOLoop.current()
        loop.add_callback(self.launch_instance_async, argv)
        try:
            loop.start()
        except KeyboardInterrupt:
            print("\nInterrupted")
Example No. 13
class MarketMap(DOMWidget):
    """Waffle wrapped map.

    Attributes
    ----------
    names: numpy.ndarray of strings (default: [])
        The elements can also be objects convertible to string
        primary key for the map data. A rectangle is created for each
        unique entry in this array
    groups: numpy.ndarray (default: [])
        attribute on which the groupby is run. If this is an empty array, then
        there is no group by for the map.
    display_text: numpy.ndarray or None (default: None)
        data to be displayed on each rectangle of the map. If this is empty it
        defaults to the names attribute.
    ref_data: pandas.DataFrame or None (default: None)
        Additional data associated with each element of the map. The data in
        this data frame can be displayed as a tooltip.
    color: numpy.ndarray (default: [])
        Data to represent the color for each of the cells. If the value of the
        data is NaN for a cell, then the color of the cell is the color of the
        group it belongs to in absence of data for color
    scales: Dictionary of scales holding a scale for each data attribute
        If the map has data being passed as color, then a corresponding color
        scale is required
    axes: List of axes
        Ability to add an axis for the scales which are used to scale data
        represented in the map
    on_hover: custom event
        This event is received when the mouse is hovering over a cell. Returns
        the data of the cell and the ref_data associated with the cell.
    tooltip_widget: Instance of a widget
        Widget to be displayed as the tooltip. This can be combined with the
        on_hover event to display the chart corresponding to the cell being
        hovered on.
    tooltip_fields: list
        names of the fields from the ref_data dataframe which should be
        displayed in the tooltip.
    tooltip_formats: list
        formats for each of the fields for the tooltip data. Order should match
        the order of the tooltip_fields
    show_groups: bool
        attribute to determine if the groups should be displayed. If set to
        True, the finer elements are blurred

    Map Drawing Attributes
    cols: int
        Suggestion for the number of columns in the map. If not specified,
        the value is inferred from the number of rows and the number of cells.
    rows: int
        Number of rows in the map. If not specified, the value is inferred
        from the number of cells and the number of columns.
        If neither rows nor columns is specified, a square is constructed
        based on the number of cells.
        The above two attributes are suggestions which are respected unless
        they are not feasible. One required condition is that the number of
        columns is odd when row_groups is greater than 1.
    row_groups: int
        Number of groups the rows should be divided into. This can be used to
        draw more square cells for each of the groups.

    Layout Attributes

    map_margin: dict (default: {top=50, bottom=50, left=50, right=50})
        Dictionary containing the top, bottom, left and right margins. The user
        is responsible for making sure that the width and height are greater
        than the sum of the margins.
    min_aspect_ratio: float
         minimum width / height ratio of the figure
    max_aspect_ratio: float
         maximum width / height ratio of the figure


    Display Attributes

    colors: list of colors
        Colors for each of the groups which are cycled over to cover all the
        groups
    title: string
        Title of the Market Map
    title_style: dict
        CSS style for the title of the Market Map
    stroke: color
        Stroke of each of the cells of the market map
    group_stroke: color
        Stroke of the border for the group of cells corresponding to a group
    selected_stroke: color
        stroke for the selected cells
    hovered_stroke: color
        stroke for the cell being hovered on
    font_style: dict
        CSS style for the text of each cell

    Other Attributes

    enable_select: bool
        boolean to control the ability to select the cells of the map by
        clicking
    enable_hover: bool
        boolean to control if the map should be aware of which cell is being
        hovered on. If it is set to False, tooltip will not be displayed

    Note
    ----

    The aspect ratios stand for width / height ratios.

     - If the available space is within bounds in terms of min and max aspect
       ratio, we use the entire available space.
     - If the available space is too oblong horizontally, we use the client
       height and the width that corresponds to max_aspect_ratio (maximize
       width under the constraint).
     - If the available space is too oblong vertically, we use the client width
       and the height that corresponds to min_aspect_ratio (maximize height
       under the constraint).
       This corresponds to maximizing the area under the constraints.

    Here the defaults are min_aspect_ratio = 1.0 and max_aspect_ratio = 6.0
    (see the traits below).
    """
    names = Array([]).tag(sync=True, **array_serialization)
    groups = Array([]).tag(sync=True, **array_serialization)
    display_text = Array(None, allow_none=True).tag(sync=True,
                                                    **array_serialization)
    ref_data = DataFrame(None, allow_none=True).tag(
        sync=True, **dataframe_serialization).valid(dataframe_warn_indexname)
    title = Unicode().tag(sync=True)

    tooltip_fields = List().tag(sync=True)
    tooltip_formats = List().tag(sync=True)
    show_groups = Bool().tag(sync=True)

    cols = Int(allow_none=True).tag(sync=True)
    rows = Int(allow_none=True).tag(sync=True)

    row_groups = Int(1).tag(sync=True)
    colors = List(CATEGORY10).tag(sync=True)
    scales = Dict().tag(sync=True, **widget_serialization)
    axes = List().tag(sync=True, **widget_serialization)
    color = Array([]).tag(sync=True, **array_serialization)
    map_margin = Dict(dict(top=50, right=50, left=50,
                           bottom=50)).tag(sync=True)

    layout = Instance(Layout, kw={
        'min_width': '125px'
    }, allow_none=True).tag(sync=True, **widget_serialization)
    min_aspect_ratio = Float(1.0).tag(sync=True)
    # Max aspect ratio is such that we can have 3 charts stacked vertically
    # on a 16:9 monitor: 16/9*3 ~ 5.333
    max_aspect_ratio = Float(6.0).tag(sync=True)

    stroke = Color('white').tag(sync=True)
    group_stroke = Color('black').tag(sync=True)
    selected_stroke = Color('dodgerblue', allow_none=True).tag(sync=True)
    hovered_stroke = Color('orangered', allow_none=True).tag(sync=True)
    font_style = Dict().tag(sync=True)
    title_style = Dict().tag(sync=True)

    selected = List().tag(sync=True)
    enable_hover = Bool(True).tag(sync=True)
    enable_select = Bool(True).tag(sync=True)
    tooltip_widget = Instance(DOMWidget, allow_none=True,
                              default_value=None).tag(sync=True,
                                                      **widget_serialization)

    def __init__(self, **kwargs):
        super(MarketMap, self).__init__(**kwargs)
        self._hover_handlers = CallbackDispatcher()
        self.on_msg(self._handle_custom_msgs)

    def on_hover(self, callback, remove=False):
        self._hover_handlers.register_callback(callback, remove=remove)

    def _handle_custom_msgs(self, _, content, buffers=None):
        if content.get('event', '') == 'hover':
            self._hover_handlers(self, content)

    _view_name = Unicode('MarketMap').tag(sync=True)
    _model_name = Unicode('MarketMapModel').tag(sync=True)
    _view_module = Unicode('bqplot').tag(sync=True)
    _model_module = Unicode('bqplot').tag(sync=True)
    _view_module_version = Unicode(__frontend_version__).tag(sync=True)
    _model_module_version = Unicode(__frontend_version__).tag(sync=True)
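
# Usage sketch (illustrative data; assumes a notebook frontend with the
# bqplot extension enabled):
#
#     import numpy as np
#     from bqplot.market_map import MarketMap
#
#     mm = MarketMap(names=np.array(['AAPL', 'MSFT', 'GOOG', 'AMZN']),
#                    cols=2, rows=2, title='Sample market map')
#     mm.on_hover(lambda market_map, content: print(content))
#     mm  # display the widget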
Example No. 14
class RiskConfig(HasTraits):
    max_drawdown = Float(
        default_value=100.0)  # % Max drawdown before liquidation
    max_risk = Float(default_value=100.0)  # % Max to risk
    total_funds = Float(default_value=0.0)  # total funds available
    trading_type = Instance(klass=TradingType, args=('NONE', ), kwargs={})
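
# Usage sketch (TradingType comes from this project's own modules):
#
#     risk = RiskConfig(max_drawdown=20.0, max_risk=5.0, total_funds=10000.0)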
Example No. 15
class Kernel(SingletonConfigurable):

    #---------------------------------------------------------------------------
    # Kernel interface
    #---------------------------------------------------------------------------

    # attribute to override with a GUI
    eventloop = Any(None)

    @observe('eventloop')
    def _update_eventloop(self, change):
        """schedule call to eventloop from IOLoop"""
        loop = ioloop.IOLoop.instance()
        loop.add_callback(self.enter_eventloop)

    session = Instance(Session, allow_none=True)
    profile_dir = Instance('IPython.core.profiledir.ProfileDir',
                           allow_none=True)
    shell_streams = List()
    control_stream = Instance(ZMQStream, allow_none=True)
    iopub_socket = Any()
    iopub_thread = Any()
    stdin_socket = Any()
    log = Instance(logging.Logger, allow_none=True)

    # identities:
    int_id = Integer(-1)
    ident = Unicode()

    @default('ident')
    def _default_ident(self):
        return unicode_type(uuid.uuid4())

    # This should be overridden by wrapper kernels that implement any real
    # language.
    language_info = {}

    # any links that should go in the help menu
    help_links = List()

    # Private interface

    _darwin_app_nap = Bool(
        True,
        help="""Whether to use appnope for compatiblity with OS X App Nap.

        Only affects OS X >= 10.9.
        """).tag(config=True)

    # track associations with current request
    _allow_stdin = Bool(False)
    _parent_header = Dict()
    _parent_ident = Any(b'')
    # Time to sleep after flushing the stdout/err buffers in each execute
    # cycle.  While this introduces a hard limit on the minimal latency of the
    # execute cycle, it helps prevent output synchronization problems for
    # clients.
    # Units are in seconds.  The minimum zmq latency on local host is probably
    # ~150 microseconds, set this to 500us for now.  We may need to increase it
    # a little if it's not enough after more interactive testing.
    _execute_sleep = Float(0.0005).tag(config=True)

    # Frequency of the kernel's event loop.
    # Units are in seconds, kernel subclasses for GUI toolkits may need to
    # adapt to milliseconds.
    _poll_interval = Float(0.05).tag(config=True)

    # If the shutdown was requested over the network, we leave here the
    # necessary reply message so it can be sent by our registered atexit
    # handler.  This ensures that the reply is only sent to clients truly at
    # the end of our shutdown process (which happens after the underlying
    # IPython shell's own shutdown).
    _shutdown_message = None

    # This is a dict of the port numbers that the kernel is listening on. It is set
    # by record_ports and used by connect_request.
    _recorded_ports = Dict()

    # set of aborted msg_ids
    aborted = Set()

    # Track execution count here. For IPython, we override this to use the
    # execution count we store in the shell.
    execution_count = 0

    msg_types = [
        'execute_request',
        'complete_request',
        'inspect_request',
        'history_request',
        'comm_info_request',
        'kernel_info_request',
        'connect_request',
        'shutdown_request',
        'is_complete_request',
        # deprecated:
        'apply_request',
    ]
    # add deprecated ipyparallel control messages
    control_msg_types = msg_types + ['clear_request', 'abort_request']

    def __init__(self, **kwargs):
        super(Kernel, self).__init__(**kwargs)

        # Build dict of handlers for message types
        self.shell_handlers = {}
        for msg_type in self.msg_types:
            self.shell_handlers[msg_type] = getattr(self, msg_type)

        self.control_handlers = {}
        for msg_type in self.control_msg_types:
            self.control_handlers[msg_type] = getattr(self, msg_type)

    def dispatch_control(self, msg):
        """dispatch control requests"""
        idents, msg = self.session.feed_identities(msg, copy=False)
        try:
            msg = self.session.deserialize(msg, content=True, copy=False)
        except:
            self.log.error("Invalid Control Message", exc_info=True)
            return

        self.log.debug("Control received: %s", msg)

        # Set the parent message for side effects.
        self.set_parent(idents, msg)
        self._publish_status(u'busy')

        header = msg['header']
        msg_type = header['msg_type']

        handler = self.control_handlers.get(msg_type, None)
        if handler is None:
            self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
        else:
            try:
                handler(self.control_stream, idents, msg)
            except Exception:
                self.log.error("Exception in control handler:", exc_info=True)

        sys.stdout.flush()
        sys.stderr.flush()
        self._publish_status(u'idle')

    def should_handle(self, stream, msg, idents):
        """Check whether a shell-channel message should be handled

        Allows subclasses to prevent handling of certain messages (e.g. aborted requests).
        """
        msg_id = msg['header']['msg_id']
        if msg_id in self.aborted:
            msg_type = msg['header']['msg_type']
            # is it safe to assume a msg_id will not be resubmitted?
            self.aborted.remove(msg_id)
            reply_type = msg_type.split('_')[0] + '_reply'
            status = {'status': 'aborted'}
            md = {'engine': self.ident}
            md.update(status)
            self.session.send(stream,
                              reply_type,
                              metadata=md,
                              content=status,
                              parent=msg,
                              ident=idents)
            return False
        return True

    def dispatch_shell(self, stream, msg):
        """dispatch shell requests"""
        # flush control requests first
        if self.control_stream:
            self.control_stream.flush()

        idents, msg = self.session.feed_identities(msg, copy=False)
        try:
            msg = self.session.deserialize(msg, content=True, copy=False)
        except:
            self.log.error("Invalid Message", exc_info=True)
            return

        # Set the parent message for side effects.
        self.set_parent(idents, msg)
        self._publish_status(u'busy')

        header = msg['header']
        msg_id = header['msg_id']
        msg_type = msg['header']['msg_type']

        # Print some info about this message and leave a '--->' marker, so it's
        # easier to trace visually the message chain when debugging.  Each
        # handler prints its message at the end.
        self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
        self.log.debug('   Content: %s\n   --->\n   ', msg['content'])

        if not self.should_handle(stream, msg, idents):
            return

        handler = self.shell_handlers.get(msg_type, None)
        if handler is None:
            self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
        else:
            self.log.debug("%s: %s", msg_type, msg)
            self.pre_handler_hook()
            try:
                handler(stream, idents, msg)
            except Exception:
                self.log.error("Exception in message handler:", exc_info=True)
            finally:
                self.post_handler_hook()

        sys.stdout.flush()
        sys.stderr.flush()
        self._publish_status(u'idle')

    def pre_handler_hook(self):
        """Hook to execute before calling message handler"""
        # ensure default_int_handler during handler call
        self.saved_sigint_handler = signal(SIGINT, default_int_handler)

    def post_handler_hook(self):
        """Hook to execute after calling message handler"""
        signal(SIGINT, self.saved_sigint_handler)

    def enter_eventloop(self):
        """enter eventloop"""
        self.log.info("entering eventloop %s", self.eventloop)
        for stream in self.shell_streams:
            # flush any pending replies,
            # which may be skipped by entering the eventloop
            stream.flush(zmq.POLLOUT)
        # restore default_int_handler
        signal(SIGINT, default_int_handler)
        while self.eventloop is not None:
            try:
                self.eventloop(self)
            except KeyboardInterrupt:
                # Ctrl-C shouldn't crash the kernel
                self.log.error("KeyboardInterrupt caught in kernel")
                continue
            else:
                # eventloop exited cleanly, this means we should stop (right?)
                self.eventloop = None
                break
        self.log.info("exiting eventloop")

    def start(self):
        """register dispatchers for streams"""
        if self.control_stream:
            self.control_stream.on_recv(self.dispatch_control, copy=False)

        def make_dispatcher(stream):
            def dispatcher(msg):
                return self.dispatch_shell(stream, msg)

            return dispatcher

        for s in self.shell_streams:
            s.on_recv(make_dispatcher(s), copy=False)

        # publish idle status
        self._publish_status('starting')

    def do_one_iteration(self):
        """step eventloop just once"""
        if self.control_stream:
            self.control_stream.flush()
        for stream in self.shell_streams:
            # handle at most one request per iteration
            stream.flush(zmq.POLLIN, 1)
            stream.flush(zmq.POLLOUT)

    def record_ports(self, ports):
        """Record the ports that this kernel is using.

        The creator of the Kernel instance must call this methods if they
        want the :meth:`connect_request` method to return the port numbers.
        """
        self._recorded_ports = ports

    #---------------------------------------------------------------------------
    # Kernel request handlers
    #---------------------------------------------------------------------------

    def _publish_execute_input(self, code, parent, execution_count):
        """Publish the code request on the iopub stream."""

        self.session.send(self.iopub_socket,
                          u'execute_input', {
                              u'code': code,
                              u'execution_count': execution_count
                          },
                          parent=parent,
                          ident=self._topic('execute_input'))

    def _publish_status(self, status, parent=None):
        """send status (busy/idle) on IOPub"""
        self.session.send(
            self.iopub_socket,
            u'status',
            {u'execution_state': status},
            parent=parent or self._parent_header,
            ident=self._topic('status'),
        )

    def set_parent(self, ident, parent):
        """Set the current parent_header

        Side effects (IOPub messages) and replies are associated with
        the request that caused them via the parent_header.

        The parent identity is used to route input_request messages
        on the stdin channel.
        """
        self._parent_ident = ident
        self._parent_header = parent

    def send_response(self,
                      stream,
                      msg_or_type,
                      content=None,
                      ident=None,
                      buffers=None,
                      track=False,
                      header=None,
                      metadata=None):
        """Send a response to the message we're currently processing.

        This accepts all the parameters of :meth:`jupyter_client.session.Session.send`
        except ``parent``.

        This relies on :meth:`set_parent` having been called for the current
        message.
        """
        return self.session.send(stream, msg_or_type, content,
                                 self._parent_header, ident, buffers, track,
                                 header, metadata)

    def init_metadata(self, parent):
        """Initialize metadata.

        Run at the beginning of execution requests.
        """
        return {
            'started': datetime.now(),
        }

    def finish_metadata(self, parent, metadata, reply_content):
        """Finish populating metadata.

        Run after completing an execution request.
        """
        return metadata

    def execute_request(self, stream, ident, parent):
        """handle an execute_request"""

        try:
            content = parent[u'content']
            code = py3compat.cast_unicode_py2(content[u'code'])
            silent = content[u'silent']
            store_history = content.get(u'store_history', not silent)
            user_expressions = content.get('user_expressions', {})
            allow_stdin = content.get('allow_stdin', False)
        except:
            self.log.error("Got bad msg: ")
            self.log.error("%s", parent)
            return

        stop_on_error = content.get('stop_on_error', True)

        metadata = self.init_metadata(parent)

        # Re-broadcast our input for the benefit of listening clients, and
        # start computing output
        if not silent:
            self.execution_count += 1
            self._publish_execute_input(code, parent, self.execution_count)

        reply_content = self.do_execute(code, silent, store_history,
                                        user_expressions, allow_stdin)

        # Flush output before sending the reply.
        sys.stdout.flush()
        sys.stderr.flush()
        # FIXME: on rare occasions, the flush doesn't seem to make it to the
        # clients... This seems to mitigate the problem, but we definitely need
        # to better understand what's going on.
        if self._execute_sleep:
            time.sleep(self._execute_sleep)

        # Send the reply.
        reply_content = json_clean(reply_content)
        metadata = self.finish_metadata(parent, metadata, reply_content)

        reply_msg = self.session.send(stream,
                                      u'execute_reply',
                                      reply_content,
                                      parent,
                                      metadata=metadata,
                                      ident=ident)

        self.log.debug("%s", reply_msg)

        if not silent and reply_msg['content'][
                'status'] == u'error' and stop_on_error:
            self._abort_queues()

    def do_execute(self,
                   code,
                   silent,
                   store_history=True,
                   user_expressions=None,
                   allow_stdin=False):
        """Execute user code. Must be overridden by subclasses.
        """
        raise NotImplementedError
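
    # Wrapper-kernel sketch following the ipykernel wrapper-kernel pattern
    # (EchoKernel and its metadata are illustrative):
    #
    #     class EchoKernel(Kernel):
    #         implementation = 'echo'
    #         implementation_version = '1.0'
    #         banner = 'Echo kernel'
    #         language_info = {'name': 'echo', 'mimetype': 'text/plain',
    #                          'file_extension': '.txt'}
    #
    #         def do_execute(self, code, silent, store_history=True,
    #                        user_expressions=None, allow_stdin=False):
    #             if not silent:
    #                 self.send_response(self.iopub_socket, 'stream',
    #                                    {'name': 'stdout', 'text': code})
    #             return {'status': 'ok',
    #                     'execution_count': self.execution_count,
    #                     'payload': [], 'user_expressions': {}}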

    def complete_request(self, stream, ident, parent):
        content = parent['content']
        code = content['code']
        cursor_pos = content['cursor_pos']

        matches = self.do_complete(code, cursor_pos)
        matches = json_clean(matches)
        completion_msg = self.session.send(stream, 'complete_reply', matches,
                                           parent, ident)
        self.log.debug("%s", completion_msg)

    def do_complete(self, code, cursor_pos):
        """Override in subclasses to find completions.
        """
        return {
            'matches': [],
            'cursor_end': cursor_pos,
            'cursor_start': cursor_pos,
            'metadata': {},
            'status': 'ok'
        }

    def inspect_request(self, stream, ident, parent):
        content = parent['content']

        reply_content = self.do_inspect(content['code'], content['cursor_pos'],
                                        content.get('detail_level', 0))
        # Before we send this object over, we scrub it for JSON usage
        reply_content = json_clean(reply_content)
        msg = self.session.send(stream, 'inspect_reply', reply_content, parent,
                                ident)
        self.log.debug("%s", msg)

    def do_inspect(self, code, cursor_pos, detail_level=0):
        """Override in subclasses to allow introspection.
        """
        return {'status': 'ok', 'data': {}, 'metadata': {}, 'found': False}

    def history_request(self, stream, ident, parent):
        content = parent['content']

        reply_content = self.do_history(**content)

        reply_content = json_clean(reply_content)
        msg = self.session.send(stream, 'history_reply', reply_content, parent,
                                ident)
        self.log.debug("%s", msg)

    def do_history(self,
                   hist_access_type,
                   output,
                   raw,
                   session=None,
                   start=None,
                   stop=None,
                   n=None,
                   pattern=None,
                   unique=False):
        """Override in subclasses to access history.
        """
        return {'history': []}

    def connect_request(self, stream, ident, parent):
        if self._recorded_ports is not None:
            content = self._recorded_ports.copy()
        else:
            content = {}
        msg = self.session.send(stream, 'connect_reply', content, parent,
                                ident)
        self.log.debug("%s", msg)

    @property
    def kernel_info(self):
        return {
            'protocol_version': kernel_protocol_version,
            'implementation': self.implementation,
            'implementation_version': self.implementation_version,
            'language_info': self.language_info,
            'banner': self.banner,
            'help_links': self.help_links,
        }

    def kernel_info_request(self, stream, ident, parent):
        msg = self.session.send(stream, 'kernel_info_reply', self.kernel_info,
                                parent, ident)
        self.log.debug("%s", msg)

    def comm_info_request(self, stream, ident, parent):
        content = parent['content']
        target_name = content.get('target_name', None)

        # Should this be moved to ipkernel?
        if hasattr(self, 'comm_manager'):
            comms = {
                k: dict(target_name=v.target_name)
                for (k, v) in self.comm_manager.comms.items()
                if v.target_name == target_name or target_name is None
            }
        else:
            comms = {}
        reply_content = dict(comms=comms)
        msg = self.session.send(stream, 'comm_info_reply', reply_content,
                                parent, ident)
        self.log.debug("%s", msg)

    def shutdown_request(self, stream, ident, parent):
        content = self.do_shutdown(parent['content']['restart'])
        self.session.send(stream,
                          u'shutdown_reply',
                          content,
                          parent,
                          ident=ident)
        # same content, but different msg_id for broadcasting on IOPub
        self._shutdown_message = self.session.msg(u'shutdown_reply', content,
                                                  parent)

        self._at_shutdown()
        # call sys.exit after a short delay
        loop = ioloop.IOLoop.instance()
        loop.add_timeout(time.time() + 0.1, loop.stop)

    def do_shutdown(self, restart):
        """Override in subclasses to do things when the frontend shuts down the
        kernel.
        """
        return {'status': 'ok', 'restart': restart}

    def is_complete_request(self, stream, ident, parent):
        content = parent['content']
        code = content['code']

        reply_content = self.do_is_complete(code)
        reply_content = json_clean(reply_content)
        reply_msg = self.session.send(stream, 'is_complete_reply',
                                      reply_content, parent, ident)
        self.log.debug("%s", reply_msg)

    def do_is_complete(self, code):
        """Override in subclasses to find completions.
        """
        return {
            'status': 'unknown',
        }

    #---------------------------------------------------------------------------
    # Engine methods (DEPRECATED)
    #---------------------------------------------------------------------------

    def apply_request(self, stream, ident, parent):
        self.log.warn(
            """apply_request is deprecated in kernel_base, moving to ipyparallel."""
        )
        try:
            content = parent[u'content']
            bufs = parent[u'buffers']
            msg_id = parent['header']['msg_id']
        except:
            self.log.error("Got bad msg: %s", parent, exc_info=True)
            return

        md = self.init_metadata(parent)

        reply_content, result_buf = self.do_apply(content, bufs, msg_id, md)

        # flush i/o
        sys.stdout.flush()
        sys.stderr.flush()

        md = self.finish_metadata(parent, md, reply_content)

        self.session.send(stream,
                          u'apply_reply',
                          reply_content,
                          parent=parent,
                          ident=ident,
                          buffers=result_buf,
                          metadata=md)

    def do_apply(self, content, bufs, msg_id, reply_metadata):
        """DEPRECATED"""
        raise NotImplementedError

    #---------------------------------------------------------------------------
    # Control messages (DEPRECATED)
    #---------------------------------------------------------------------------

    def abort_request(self, stream, ident, parent):
        """abort a specific msg by id"""
        self.log.warn(
            "abort_request is deprecated in kernel_base. It os only part of IPython parallel"
        )
        msg_ids = parent['content'].get('msg_ids', None)
        if isinstance(msg_ids, string_types):
            msg_ids = [msg_ids]
        if not msg_ids:
            self._abort_queues()
        for mid in msg_ids:
            self.aborted.add(str(mid))

        content = dict(status='ok')
        reply_msg = self.session.send(stream,
                                      'abort_reply',
                                      content=content,
                                      parent=parent,
                                      ident=ident)
        self.log.debug("%s", reply_msg)

    def clear_request(self, stream, idents, parent):
        """Clear our namespace."""
        self.log.warn(
            "clear_request is deprecated in kernel_base. It is only part of IPython parallel"
        )
        content = self.do_clear()
        self.session.send(stream,
                          'clear_reply',
                          ident=idents,
                          parent=parent,
                          content=content)

    def do_clear(self):
        """DEPRECATED"""
        raise NotImplementedError

    #---------------------------------------------------------------------------
    # Protected interface
    #---------------------------------------------------------------------------

    def _topic(self, topic):
        """prefixed topic for IOPub messages"""
        base = "kernel.%s" % self.ident

        return py3compat.cast_bytes("%s.%s" % (base, topic))

    def _abort_queues(self):
        for stream in self.shell_streams:
            if stream:
                self._abort_queue(stream)

    def _abort_queue(self, stream):
        poller = zmq.Poller()
        poller.register(stream.socket, zmq.POLLIN)
        while True:
            idents, msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
            if msg is None:
                return

            self.log.info("Aborting:")
            self.log.info("%s", msg)
            msg_type = msg['header']['msg_type']
            reply_type = msg_type.split('_')[0] + '_reply'

            status = {'status': 'aborted'}
            md = {'engine': self.ident}
            md.update(status)
            reply_msg = self.session.send(stream,
                                          reply_type,
                                          metadata=md,
                                          content=status,
                                          parent=msg,
                                          ident=idents)
            self.log.debug("%s", reply_msg)
            # We need to wait a bit for requests to come in. This can probably
            # be set shorter for true asynchronous clients.
            poller.poll(50)

    def _no_raw_input(self):
        """Raise StdinNotImplentedError if active frontend doesn't support
        stdin."""
        raise StdinNotImplementedError("raw_input was called, but this "
                                       "frontend does not support stdin.")

    def getpass(self, prompt=''):
        """Forward getpass to frontends

        Raises
        ------
        StdinNotImplementedError if active frontend doesn't support stdin.
        """
        if not self._allow_stdin:
            raise StdinNotImplementedError(
                "getpass was called, but this frontend does not support input requests."
            )
        return self._input_request(
            prompt,
            self._parent_ident,
            self._parent_header,
            password=True,
        )

    def raw_input(self, prompt=''):
        """Forward raw_input to frontends

        Raises
        ------
        StdinNotImplementedError if active frontend doesn't support stdin.
        """
        if not self._allow_stdin:
            raise StdinNotImplementedError(
                "raw_input was called, but this frontend does not support input requests."
            )
        return self._input_request(
            prompt,
            self._parent_ident,
            self._parent_header,
            password=False,
        )

    def _input_request(self, prompt, ident, parent, password=False):
        # Flush output before making the request.
        sys.stderr.flush()
        sys.stdout.flush()
        # flush the stdin socket, to purge stale replies
        while True:
            try:
                self.stdin_socket.recv_multipart(zmq.NOBLOCK)
            except zmq.ZMQError as e:
                if e.errno == zmq.EAGAIN:
                    break
                else:
                    raise

        # Send the input request.
        content = json_clean(dict(prompt=prompt, password=password))
        self.session.send(self.stdin_socket,
                          u'input_request',
                          content,
                          parent,
                          ident=ident)

        # Await a response.
        while True:
            try:
                ident, reply = self.session.recv(self.stdin_socket, 0)
            except Exception:
                self.log.warn("Invalid Message:", exc_info=True)
            except KeyboardInterrupt:
                # re-raise KeyboardInterrupt, to truncate traceback
                raise KeyboardInterrupt
            else:
                break
        try:
            value = py3compat.unicode_to_str(reply['content']['value'])
        except Exception:
            self.log.error("Bad input_reply: %s", parent)
            value = ''
        if value == '\x04':
            # EOF
            raise EOFError
        return value

    def _at_shutdown(self):
        """Actions taken at shutdown by the kernel, called by python's atexit.
        """
        # io.rprint("Kernel at_shutdown") # dbg
        if self._shutdown_message is not None:
            self.session.send(self.iopub_socket,
                              self._shutdown_message,
                              ident=self._topic('shutdown'))
            self.log.debug("%s", self._shutdown_message)
        [s.flush(zmq.POLLOUT) for s in self.shell_streams]
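The stdin handling in _input_request above hinges on one pattern: drain any stale replies with non-blocking reads until zmq signals EAGAIN. A standalone sketch of just that loop, where `sock` is a hypothetical connected pyzmq socket:

import zmq

def drain_stale_replies(sock):
    """Read and discard pending multipart messages until the queue is empty."""
    while True:
        try:
            sock.recv_multipart(zmq.NOBLOCK)
        except zmq.ZMQError as e:
            if e.errno == zmq.EAGAIN:
                break  # nothing left to read
            raise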
Example #16
class BM25Para(Configurable):
    """
    BM25 parameters
    """
    k1 = Float(BM25_K1).tag(config=True)
    b = Float(BM25_B).tag(config=True)
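BM25Para only declares the two tunables k1 and b. For context, a minimal sketch of how they typically enter the Okapi BM25 term score; the helper and its argument names are illustrative, not part of the example:

import math

def bm25_term_score(tf, df, num_docs, doc_len, avg_doc_len, k1=1.2, b=0.75):
    # idf times a saturated, length-normalized term frequency
    idf = math.log(1.0 + (num_docs - df + 0.5) / (df + 0.5))
    norm = k1 * (1.0 - b + b * doc_len / avg_doc_len)
    return idf * tf * (k1 + 1.0) / (tf + norm)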
Example #17
class KubeClusterManager(ClusterManager):
    """A cluster manager for deploying Dask on a Kubernetes cluster."""

    namespace = Unicode(
        "default",
        help="""
        Kubernetes namespace to launch pods in.

        If running inside a kubernetes cluster with service accounts enabled,
        defaults to the current namespace. If not, defaults to `default`
        """,
        config=True,
    )

    @default("namespace")
    def _default_namespace(self):
        """
        Set namespace default to current namespace if running in a k8s cluster

        If not in a k8s cluster with service accounts enabled, default to
        `default`
        """
        ns_path = "/var/run/secrets/kubernetes.io/serviceaccount/namespace"
        if os.path.exists(ns_path):
            with open(ns_path) as f:
                return f.read().strip()
        return "default"

    image = Unicode(
        "jcrist/dask-gateway:latest",
        help="Docker image to use for running user's containers.",
        config=True,
    )

    image_pull_policy = Unicode(
        "IfNotPresent",
        help="The image pull policy of the docker image specified in ``image``",
        config=True,
    )

    image_pull_secrets = List(
        trait=Unicode(),
        help="""
        A list of secrets to use for pulling images from a private repository.

        See `the Kubernetes documentation
        <https://kubernetes.io/docs/concepts/containers/images/#specifying-imagepullsecrets-on-a-pod>`__
        for more information.
        """,
        config=True,
    )

    working_dir = Unicode(
        help="""
        The working directory where the command will be run inside the
        container. Default is the working directory defined in the Dockerfile.
        """,
        config=True,
    )

    common_labels = Dict(
        {
            "app.kubernetes.io/name": "dask-gateway",
            "app.kubernetes.io/version": VERSION.replace("+", "_"),
        },
        help="Kubernetes labels to apply to all objects created by the gateway",
        config=True,
    )

    common_annotations = Dict(
        help=
        "Kubernetes annotations to apply to all objects created by the gateway",
        config=True,
    )

    # Kubernetes is a bit different in types/granularity of resource requests.
    # We redefine these common fields here to support that.
    worker_cores = Float(
        1,
        min=0,
        help="""
        Number of cpu-cores available for a dask worker.
        """,
        config=True,
    )

    worker_cores_limit = Float(
        min=0,
        help="""
        Maximum number of cpu-cores available for a dask worker.

        Defaults to ``worker_cores``.
        """,
        config=True,
    )

    @default("worker_cores_limit")
    def _default_worker_cores_limit(self):
        return self.worker_cores

    worker_memory_limit = MemoryLimit(
        help="""
        Maximum number of bytes available for a dask worker. Allows the
        following suffixes:

        - K -> Kibibytes
        - M -> Mebibytes
        - G -> Gibibytes
        - T -> Tebibytes

        Defaults to ``worker_memory``.
        """,
        config=True,
    )

    @default("worker_memory_limit")
    def _default_worker_memory_limit(self):
        return self.worker_memory

    scheduler_cores = Float(
        1,
        min=0,
        help="""
        Number of cpu-cores available for a dask scheduler.
        """,
        config=True,
    )

    scheduler_cores_limit = Float(
        min=0,
        help="""
        Maximum number of cpu-cores available for a dask scheduler.

        Defaults to ``scheduler_cores``.
        """,
        config=True,
    )

    @default("scheduler_cores_limit")
    def _default_scheduler_cores_limit(self):
        return self.scheduler_cores

    scheduler_memory_limit = MemoryLimit(
        help="""
        Maximum number of bytes available for a dask scheduler. Allows the
        following suffixes:

        - K -> Kibibytes
        - M -> Mebibytes
        - G -> Gibibytes
        - T -> Tebibytes

        Defaults to ``scheduler_memory``.
        """,
        config=True,
    )

    @default("scheduler_memory_limit")
    def _default_scheduler_memory_limit(self):
        return self.scheduler_memory

    # Internal fields
    kube_client = Instance(kubernetes.client.CoreV1Api)

    @default("kube_client")
    def _default_kube_client(self):
        configure_kubernetes_clients()
        return kubernetes.client.CoreV1Api()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Starts the pod reflector for the first instance only
        self.pod_reflector = PodReflector.instance(
            parent=self.parent or self,
            kube_client=self.kube_client,
            namespace=self.namespace,
            label_selector=self.pod_label_selector,
        )

    def get_tls_paths(self):
        """Get the absolute paths to the tls cert and key files."""
        return "/etc/dask-credentials/dask.crt", "/etc/dask-credentials/dask.pem"

    @property
    def worker_command(self):
        """The full command (with args) to launch a dask worker"""
        return [
            self.worker_cmd,
            "--nthreads",
            str(int(self.worker_cores_limit)),
            "--memory-limit",
            str(self.worker_memory_limit),
        ]

    @property
    def scheduler_command(self):
        """The full command (with args) to launch a dask scheduler"""
        return [self.scheduler_cmd]

    @property
    def pod_label_selector(self):
        """A label selector for all pods started by dask-gateway"""
        return ",".join("%s=%s" % (k, v)
                        for k, v in self.common_labels.items())

    def get_labels_for(self, component, worker_name=None):
        labels = self.common_labels.copy()
        labels.update({
            "app.kubernetes.io/component": component,
            "cluster-name": self.cluster_name,
        })
        if worker_name:
            labels["worker-name"] = worker_name
        return labels

    def make_secret_spec(self):
        name = "dask-gateway-tls-%s" % self.cluster_name
        labels = self.get_labels_for("dask-gateway-tls")
        annotations = self.common_annotations

        secret = V1Secret(
            kind="Secret",
            api_version="v1",
            string_data={
                "dask.crt": self.tls_cert.decode(),
                "dask.pem": self.tls_key.decode(),
            },
            metadata=V1ObjectMeta(name=name,
                                  labels=labels,
                                  annotations=annotations),
        )
        return secret

    def make_pod_spec(self, tls_secret, worker_name=None):
        annotations = self.common_annotations
        env = self.get_env()

        if worker_name is not None:
            # Worker
            name = "dask-gateway-worker-%s" % worker_name
            container_name = "dask-gateway-worker"
            labels = self.get_labels_for("dask-gateway-worker",
                                         worker_name=worker_name)
            mem_req = self.worker_memory
            mem_lim = self.worker_memory_limit
            cpu_req = self.worker_cores
            cpu_lim = self.worker_cores_limit
            env["DASK_GATEWAY_WORKER_NAME"] = worker_name
            cmd = self.worker_command
        else:
            # Scheduler
            name = "dask-gateway-scheduler-%s" % self.cluster_name
            container_name = "dask-gateway-scheduler"
            labels = self.get_labels_for("dask-gateway-scheduler")
            mem_req = self.scheduler_memory
            mem_lim = self.scheduler_memory_limit
            cpu_req = self.scheduler_cores
            cpu_lim = self.scheduler_cores_limit
            cmd = self.scheduler_command

        volume = V1Volume(name="dask-credentials",
                          secret=V1SecretVolumeSource(secret_name=tls_secret))

        container = V1Container(
            name=container_name,
            image=self.image,
            args=cmd,
            env=[V1EnvVar(k, v) for k, v in env.items()],
            working_dir=self.working_dir or None,
            image_pull_policy=self.image_pull_policy,
            resources=V1ResourceRequirements(
                requests={
                    "cpu": cpu_req,
                    "memory": mem_req
                },
                limits={
                    "cpu": cpu_lim,
                    "memory": mem_lim
                },
            ),
            volume_mounts=[
                V1VolumeMount(
                    name=volume.name,
                    mount_path="/etc/dask-credentials/",
                    read_only=True,
                )
            ],
        )

        pod = V1Pod(
            kind="Pod",
            api_version="v1",
            metadata=V1ObjectMeta(name=name,
                                  labels=labels,
                                  annotations=annotations),
            spec=V1PodSpec(containers=[container],
                           volumes=[volume],
                           restart_policy="Never"),
        )

        if self.image_pull_secrets:
            pod.spec.image_pull_secrets = [
                V1LocalObjectReference(name=s) for s in self.image_pull_secrets
            ]

        # Ensure we don't accidentally give access to the kubernetes API
        pod.spec.automount_service_account_token = False

        return pod

    async def start_cluster(self):
        tls_secret = self.make_secret_spec()

        secret_name = tls_secret.metadata.name

        self.log.debug("Creating secret %s", secret_name)

        loop = get_running_loop()
        await loop.run_in_executor(None,
                                   self.kube_client.create_namespaced_secret,
                                   self.namespace, tls_secret)
        yield {"secret_name": secret_name}

        pod = self.make_pod_spec(secret_name)

        self.log.debug("Starting pod %s", pod.metadata.name)

        await loop.run_in_executor(None,
                                   self.kube_client.create_namespaced_pod,
                                   self.namespace, pod)

        yield {"secret_name": secret_name, "pod_name": pod.metadata.name}

    async def pod_status(self, pod_name, container_name):
        if pod_name is None:
            return

        # Ensure initial data already loaded
        if not self.pod_reflector.first_load_future.done():
            await self.pod_reflector.first_load_future

        pod = self.pod_reflector.pods.get(pod_name)
        if pod is not None:
            if pod.status.phase == "Pending":
                return True
            if pod.status.container_statuses is None:
                return False
            for c in pod.status.container_statuses:
                if c.name == container_name:
                    if c.state.terminated:
                        msg = ("Container stopped with exit code %d" %
                               c.state.terminated.exit_code)
                        return False, msg
                    return True
        # pod doesn't exist or has been deleted
        return False, ("Pod %s already deleted" % pod_name)

    async def cluster_status(self, cluster_state):
        return await self.pod_status(cluster_state.get("pod_name"),
                                     "dask-gateway-scheduler")

    async def worker_status(self, worker_name, worker_state, cluster_state):
        return await self.pod_status(worker_state.get("pod_name"),
                                     "dask-gateway-worker")

    async def stop_cluster(self, cluster_state):
        loop = get_running_loop()

        pod_name = cluster_state.get("pod_name")
        if pod_name is not None:
            await loop.run_in_executor(None,
                                       self.kube_client.delete_namespaced_pod,
                                       pod_name, self.namespace)

        secret_name = cluster_state.get("secret_name")
        if secret_name is not None:
            await loop.run_in_executor(
                None,
                self.kube_client.delete_namespaced_secret,
                secret_name,
                self.namespace,
            )

    async def start_worker(self, worker_name, cluster_state):
        secret_name = cluster_state["secret_name"]

        pod = self.make_pod_spec(secret_name, worker_name)

        self.log.debug("Starting pod %s", pod.metadata.name)

        loop = get_running_loop()
        await loop.run_in_executor(None,
                                   self.kube_client.create_namespaced_pod,
                                   self.namespace, pod)

        yield {"pod_name": pod.metadata.name}

    async def stop_worker(self, worker_name, worker_state, cluster_state):
        pod_name = worker_state.get("pod_name")
        if pod_name is not None:
            loop = get_running_loop()
            await loop.run_in_executor(None,
                                       self.kube_client.delete_namespaced_pod,
                                       pod_name, self.namespace)
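Every field above is tagged config=True, so it can be set through a traitlets config file. A hypothetical dask_gateway_config.py might read:

c.KubeClusterManager.namespace = "dask-gateway"
c.KubeClusterManager.image = "my-registry/dask-gateway:latest"  # hypothetical image
c.KubeClusterManager.worker_cores = 2
c.KubeClusterManager.worker_memory_limit = "4G"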
Example #18
class LmPara(Configurable):
    dir_mu = Int(LM_DIR_MU).tag(config=True)
    min_tf = Float(LM_MIN_TF).tag(config=True)
    jm_lambda = Float(LM_JM_LAMBDA).tag(config=True)
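LmPara likewise only carries parameters. A sketch of where dir_mu usually appears, namely Dirichlet-smoothed query likelihood; the helper name and signature are ours:

import math

def dirichlet_log_prob(tf, doc_len, p_w_corpus, dir_mu=2500):
    # p(w|d) smoothed toward the corpus language model by pseudo-count dir_mu
    return math.log((tf + dir_mu * p_w_corpus) / (doc_len + dir_mu))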
Example #19
class JupytextConfiguration(Configurable):
    """Jupytext Configuration's options"""

    default_jupytext_formats = Unicode(
        u"",
        help="Save notebooks to these file extensions. "
        "Can be any of ipynb,Rmd,md,jl,py,R,nb.jl,nb.py,nb.R "
        "comma separated. If you want another format than the "
        "default one, append the format name to the extension, "
        "e.g. ipynb,py:percent to save the notebook to "
        "hydrogen/spyder/vscode compatible scripts",
        config=True,
    )

    preferred_jupytext_formats_save = Unicode(
        u"",
        help="Preferred format when saving notebooks as text, per extension. "
        'Use "jl:percent,py:percent,R:percent" if you want to save '
        "Julia, Python and R scripts in the double percent format and "
        'only write "jupytext_formats": "py" in the notebook metadata.',
        config=True,
    )

    preferred_jupytext_formats_read = Unicode(
        u"",
        help="Preferred format when reading notebooks from text, per "
        'extension. Use "py:sphinx" if you want to read all python '
        "scripts as Sphinx gallery scripts.",
        config=True,
    )

    default_notebook_metadata_filter = Unicode(
        u"",
        help="Cell metadata that should be save in the text representations. "
        "Examples: 'all', '-all', 'widgets,nteract', 'kernelspec,jupytext-all'",
        config=True,
    )

    default_cell_metadata_filter = Unicode(
        u"",
        help=
        "Cell metadata that should be saved in the text representations. "
        "Examples: 'all', 'hide_input,hide_output'",
        config=True,
    )

    comment_magics = Enum(
        values=[True, False],
        allow_none=True,
        help=
        "Should Jupyter magic commands be commented out in the text representation?",
        config=True,
    )

    split_at_heading = Bool(
        False,
        help=
        "Split markdown cells on headings (Markdown and R Markdown formats only)",
        config=True,
    )

    sphinx_convert_rst2md = Bool(
        False,
        help=
        "When opening a Sphinx Gallery script, convert the reStructuredText to markdown",
        config=True,
    )

    outdated_text_notebook_margin = Float(
        1.0,
        help="Refuse to overwrite inputs of a ipynb notebooks with those of a "
        "text notebook when the text notebook plus margin is older than "
        "the ipynb notebook (NB: This option is ignored by Jupytext CLI)",
        config=True,
    )

    default_cell_markers = Unicode(
        u"",
        help=
        'Start and end cell markers for the light format, comma separated. Use "{{{,}}}" to mark cells '
        'as foldable regions in Vim, and "region,endregion" to mark cells as Vscode/PyCharm regions',
        config=True,
    )

    notebook_extensions = Unicode(
        u",".join(NOTEBOOK_EXTENSIONS),
        help="A comma separated list of notebook extensions",
        config=True,
    )

    def set_default_format_options(self, format_options, read=False):
        """Set default format option"""
        if self.default_notebook_metadata_filter:
            format_options.setdefault("notebook_metadata_filter",
                                      self.default_notebook_metadata_filter)
        if self.default_cell_metadata_filter:
            format_options.setdefault("cell_metadata_filter",
                                      self.default_cell_metadata_filter)
        if self.comment_magics is not None:
            format_options.setdefault("comment_magics", self.comment_magics)
        if self.split_at_heading:
            format_options.setdefault("split_at_heading",
                                      self.split_at_heading)
        if not read and self.default_cell_markers:
            format_options.setdefault("cell_markers",
                                      self.default_cell_markers)
        if read and self.sphinx_convert_rst2md:
            format_options.setdefault("rst2md", self.sphinx_convert_rst2md)

    def default_formats(self, path):
        """Return the default formats, if they apply to the current path #157"""
        formats = long_form_multiple_formats(self.default_jupytext_formats)
        for fmt in formats:
            try:
                base_path(path, fmt)
                return self.default_jupytext_formats
            except InconsistentPath:
                continue

        return None
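These options are all traitlets, so they can be set from a Jupyter config file. Hypothetical values shown; which config class to target depends on how jupytext is wired into the server:

c.JupytextConfiguration.default_jupytext_formats = "ipynb,py:percent"
c.JupytextConfiguration.comment_magics = True
c.JupytextConfiguration.split_at_heading = True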
Example #20
class Place(LockedXmlTraits, UrlContainer):
    """A place that can be visited."""

    data_set_type = UseEnum(
        DataSetType,
        default_value=DataSetType.EARTH).tag(xml=XmlSer.attr("DataSetType"))
    name = Unicode("").tag(xml=XmlSer.attr("Name"))
    ra_hr = Float(0.0).tag(xml=XmlSer.attr("RA"), xml_if_sky_type_is=True)
    dec_deg = Float(0.0).tag(xml=XmlSer.attr("Dec"), xml_if_sky_type_is=True)
    latitude = Float(0.0).tag(xml=XmlSer.attr("Lat"), xml_if_sky_type_is=False)
    longitude = Float(0.0).tag(xml=XmlSer.attr("Lng"),
                               xml_if_sky_type_is=False)
    constellation = UseEnum(Constellation,
                            default_value=Constellation.UNSPECIFIED).tag(
                                xml=XmlSer.attr("Constellation"))
    classification = UseEnum(Classification,
                             default_value=Classification.UNSPECIFIED).tag(
                                 xml=XmlSer.attr("Classification"))
    magnitude = Float(0.0).tag(xml=XmlSer.attr("Magnitude"))
    distance = Float(0.0).tag(xml=XmlSer.attr("Distance"), xml_omit_zero=True)
    angular_size = Float(0.0).tag(xml=XmlSer.attr("AngularSize"))
    zoom_level = Float(0.0).tag(xml=XmlSer.attr("ZoomLevel"))
    rotation_deg = Float(0.0).tag(xml=XmlSer.attr("Rotation"))
    angle = Float(0.0).tag(xml=XmlSer.attr("Angle"))
    opacity = Float(100.0).tag(xml=XmlSer.attr("Opacity"))
    dome_alt = Float(0.0).tag(xml=XmlSer.attr("DomeAlt"), xml_omit_zero=True)
    dome_az = Float(0.0).tag(xml=XmlSer.attr("DomeAz"), xml_omit_zero=True)
    background_image_set = Instance(
        ImageSet,
        allow_none=True).tag(xml=XmlSer.wrapped_inner("BackgroundImageSet"))
    foreground_image_set = Instance(
        ImageSet,
        allow_none=True).tag(xml=XmlSer.wrapped_inner("ForegroundImageSet"))
    image_set = Instance(ImageSet,
                         allow_none=True).tag(xml=XmlSer.inner("ImageSet"))
    thumbnail = Unicode("").tag(xml=XmlSer.attr("Thumbnail"))

    description = Unicode("").tag(xml=XmlSer.text_elem("Description"))
    """
    A description of the place, using HTML markup.

    This field is not actually used in the stock WWT clients, but it is wired up
    and loaded from the XML.
    """

    annotation = Unicode("").tag(xml=XmlSer.attr("Annotation"))
    """
    Annotation metadata for the place.

    This field is only used in the web engine and web client app. The web client
    app expects this field to contain a comma-separated list of key-value pairs,
    where each pair is delimited with colons:

    .. code-block::

        key1:val1,key2:val2,key3:val3

    The webclient includes some unfinished support for this field to be used to
    create circular annotations with YouTube video links. If your WTML file will
    not be viewed in the webclient, you can use this field to convey arbitrary
    textual data to the WWT Web Engine JavaScript/TypeScript layer.

    """

    msr_community_id = Int(0).tag(xml=XmlSer.attr("MSRCommunityId"),
                                  xml_omit_zero=True)
    """The ID number of the WWT Community that this content came from."""

    msr_component_id = Int(0).tag(xml=XmlSer.attr("MSRComponentId"),
                                  xml_omit_zero=True)
    """The ID number of this content item on the WWT Communities system."""

    permission = Int(0).tag(xml=XmlSer.attr("Permission"), xml_omit_zero=True)
    "TBD."

    xmeta = Instance(
        Namespace,
        args=(),
        help=
        "XML metadata - a namespace object for attaching arbitrary text to serialize",
    ).tag(xml=XmlSer.ns_to_attr("X"))

    def _tag_name(self):
        return "Place"

    def mutate_urls(self, mutator):
        if self.thumbnail:
            self.thumbnail = mutator(self.thumbnail)

        if self.background_image_set:
            self.background_image_set.mutate_urls(mutator)

        if self.foreground_image_set:
            self.foreground_image_set.mutate_urls(mutator)

        if self.image_set:
            self.image_set.mutate_urls(mutator)

    def as_imageset(self):
        """Return an ImageSet for this place if one is defined.

        Returns
        -------
        Either :class:`wwt_data_formats.imageset.ImageSet` or None.

        Notes
        -----
        If the :attr:`foreground_image_set` of this :class:`Place` is not
        None, it is returned. Otherwise, if its :attr:`image_set` is not
        None, that is returned. Otherwise, None is returned.

        """
        if self.foreground_image_set is not None:
            return self.foreground_image_set
        return self.image_set
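A minimal usage sketch for the Place traitlets above, assuming the class can be constructed with its defaults; the attribute names come straight from the definitions:

place = Place()
place.name = "M31"
place.ra_hr = 0.712
place.dec_deg = 41.27
assert place.as_imageset() is None  # no foreground or inner image set attached yet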
Example #21
class MolViz3D(MessageWidget):
    """
    Draws molecules in 3D with 3DMol.js
    """
    height = Unicode(sync=True)
    width = Unicode(sync=True)

    _view_name = Unicode('MolWidget3DView').tag(sync=True)
    _model_name = Unicode('MolWidget3DModel').tag(sync=True)
    _view_module = Unicode('nbmolviz-js').tag(sync=True)
    _model_module = Unicode('nbmolviz-js').tag(sync=True)
    atom_labels_shown = Bool(False).tag(sync=True)
    background_color = Unicode('#73757C').tag(sync=True)
    background_opacity = Float(1.0).tag(sync=True)
    model_data = Dict({}).tag(sync=True)
    orbital = Dict({}).tag(sync=True)
    selected_atom_indices = Set(set()).tag(sync=True)
    selection_type = Unicode('Atom').tag(sync=True)
    shapes = List([]).tag(sync=True)
    styles = Dict({}).tag(sync=True)

    SHAPE_NAMES = {
        'SPHERE': 'Sphere',
        'ARROW': 'Arrow',
        'CYLINDER': 'Cylinder',
    }

    STYLE_NAMES = {
        'vdw': 'sphere',
        'licorice': 'stick',
        'line': 'line',
        'ribbon': 'cartoon',
        None: None
    }

    STYLE_SYNONYMS = {
        'vdw': 'vdw',
        'sphere': 'vdw',
        'cpk': 'vdw',
        'licorice': 'licorice',
        'stick': 'licorice',
        'tube': 'licorice',
        'ball_and_stick': 'ball_and_stick',
        'line': 'line',
        'cartoon': 'ribbon',
        'ribbon': 'ribbon',
        None: None,
        'hide': None,
        'invisible': None,
        'remove': None
    }

    def __init__(self, mol=None, width=500, height=350, **kwargs):
        kwargs.update(width=width, height=height)
        super(MolViz3D, self).__init__(**kwargs)
        self.height = in_pixels(height)
        self.width = in_pixels(width)

        # current state
        self.num_frames = 1
        self.current_frame = 0
        self.current_orbital = None

        # add the new molecule if necessary
        if mol is not None: self.add_molecule(mol)
        self.current_orbital = None
        self.orbital_spec = {}
        self._cached_cubefiles = {}
        self._clicks_enabled = False

    # Utilities
    def convert_style_name(self, name):
        canonical_name = self.STYLE_SYNONYMS[name]
        if canonical_name in self.STYLE_NAMES:
            return self.STYLE_NAMES[canonical_name]
        else:
            return canonical_name

    # Standard view actions
    def add_molecule(self, mol):
        self.mol = mol
        self.model_data = convert_to_json(self.mol)

    def set_background_color(self, color, opacity=1.0):
        color = translate_color(color)
        self.background_color = color
        self.background_opacity = opacity

    def set_clipping(self, near, far):
        self.viewer('setSlab', [float(near), float(far)])

    def set_color(self, color, atoms):
        self.styles = MolViz3D.get_styles_for_color(color, atoms, self.styles)

    # some convenience synonyms
    def sphere(self, **kwargs):
        return self.add_style('vdw', **kwargs)

    vdw = cpk = sphere

    def ball_and_stick(self, **kwargs):
        return self.add_style('ball_and_stick', **kwargs)

    def licorice(self, **kwargs):
        return self.add_style('licorice', **kwargs)

    stick = tube = licorice

    def line(self, **kwargs):
        return self.add_style('line', **kwargs)

    def ribbon(self, **kwargs):
        return self.add_style('cartoon', **kwargs)

    cartoon = ribbon

    def hide(self, atoms=None):
        return self.add_style(None, atoms=atoms)

    invisible = hide

    # Returns new styles after updating the given atoms with the given color
    @staticmethod
    def get_styles_for_color(color, atoms, styles):
        styles = dict(styles)

        if not atoms:
            return styles

        for atom in atoms:
            if str(atom.index) in styles:
                styles[str(atom.index)] = dict(styles[str(atom.index)])
            else:
                styles[str(atom.index)] = {}
            styles[str(atom.index)]['color'] = color

        return styles

    def set_colors(self, colormap):
        """
        Args:
         colormap(Mapping[str,List[Atoms]]): mapping of colors to atoms
        """
        styles = dict(self.styles)
        for color, atoms in colormap.items():
            styles = MolViz3D.get_styles_for_color(color, atoms, styles)

        self.styles = styles

    def unset_color(self, atoms=None):
        if atoms is None:
            atom_json = {}
        else:
            atom_json = self._atoms_to_json(atoms)
        self.viewer('unsetAtomColor',
                    [atom_json])  # TODO: remove calls to self.viewer

    def set_style(self, style, atoms=None, **options):
        self._change_style(style, atoms, True, options)

    def add_style(self, style, atoms=None, **options):
        self._change_style(style, atoms, False, options)

    def _change_style(self, style_string, atoms, replace, options):
        style = self.convert_style_name(style_string)

        # No atoms passed means all atoms
        if atoms is None:
            atoms = self.mol.atoms
        atoms = list(atoms)

        if replace:
            styles = dict()
        else:
            styles = dict(self.styles)

        for atom in self.mol.atoms:
            for j in range(len(atoms)):
                if atoms[j] is atom:
                    key = str(atom.index)
                    new_style = styles[key].copy() if key in styles else {}
                    new_style['visualization_type'] = style
                    if 'color' in options:
                        new_style['color'] = options['color']
                    styles[key] = new_style
                    atoms.remove(atoms[j])
                    break

        self.styles = styles

    def append_frame(self, positions=None):
        if positions is None:
            positions = self.get_positions()

        positions = self._convert_units(positions)
        try:
            positions = positions.tolist()
        except AttributeError:
            pass

        self.num_frames += 1
        self.viewer('addFrameFromList', args=[positions])
        self.show_frame(self.num_frames - 1)

    def set_positions(self, positions=None):
        from moldesign import units as u
        if positions is not None:
            for i, atom in enumerate(self.mol.atoms):
                atom.position[0] = positions[i][0] * u.angstrom
                atom.position[1] = positions[i][1] * u.angstrom
                atom.position[2] = positions[i][2] * u.angstrom
        self.model_data = convert_to_json(self.mol)

    def show_frame(self, framenum):
        self.viewer('setFrame', [framenum])
        self.current_frame = framenum
        if self.current_orbital is not None:
            self.draw_orbital(self.current_orbital, **self.orbital_spec)

    #Shapes
    @staticmethod
    def _list_to_jsvec(vec):
        assert len(vec) == 3
        return dict(x=vec[0], y=vec[1], z=vec[2])

    # TODO this contains unused parameters and code due to removed functionality, is it needed?
    def draw_sphere(self,
                    position,
                    radius=2.0,
                    color='red',
                    opacity=1.0,
                    clickable=False):
        position = self._convert_units(position)
        radius = self._convert_units(radius)
        center = dict(x=position[0], y=position[1], z=position[2])
        color = translate_color(color)

        shape = {
            'type': self.SHAPE_NAMES['SPHERE'],
            'center': center,
            'radius': radius,
            'color': color,
            'opacity': opacity,
        }
        shapes = list(self.shapes)
        shapes.append(shape)
        self.shapes = shapes
        return shape

    def draw_circle(self,
                    center,
                    normal,
                    radius,
                    color='red',
                    opacity=0.8,
                    clickable=False,
                    batch=False):
        # TODO: this doesn't work! appears to be a bug in 3dmol.js
        # return self._draw3dmol_cylinder(color, center,
        #                                np.array(center) + np.array(normal) * 0.01,
        #                                True, False,
        #                                opacity,
        #                                radius)
        return self._draw3dmol_cylinder(
            color, center,
            np.array(center) + np.array(normal) * 0.01, True, True, opacity,
            radius, clickable, batch)

    def draw_cylinder(self,
                      start,
                      end,
                      radius,
                      color='red',
                      opacity=1.0,
                      clickable=False,
                      batch=False):
        return self._draw3dmol_cylinder(color, start, end, True, True, opacity,
                                        radius, clickable, batch)

    def draw_tube(self,
                  start,
                  end,
                  radius,
                  color='red',
                  opacity=1.0,
                  clickable=False,
                  batch=False):
        return self._draw3dmol_cylinder(color, start, end, False, False,
                                        opacity, radius, clickable, batch)

    # TODO this contains unused parameters and code due to removed functionality, is it needed?
    def _draw3dmol_cylinder(self, color, start, end, draw_start_face,
                            draw_end_face, opacity, radius, clickable, batch):
        color = translate_color(color)
        facestart = self._convert_units(start)
        faceend = self._convert_units(end)
        radius = self._convert_units(radius)
        spec = dict(start=self._list_to_jsvec(facestart),
                    end=self._list_to_jsvec(faceend),
                    radius=radius,
                    color=color,
                    alpha=opacity,
                    fromCap=draw_start_face,
                    toCap=draw_end_face)

        shape = {
            'type': self.SHAPE_NAMES['CYLINDER'],
            'start': self._list_to_jsvec(facestart),
            'end': self._list_to_jsvec(faceend),
            'radius': radius,
            'color': color,
            'opacity': opacity,
        }
        shapes = list(self.shapes)
        shapes.append(shape)
        self.shapes = shapes
        return shape

    # TODO this contains unused parameters and code due to removed functionality, is it needed?
    def draw_arrow(self,
                   start,
                   end=None,
                   vector=None,
                   radius=0.15,
                   color='red',
                   opacity=1.0,
                   clickable=False):
        if (end is None) == (vector is None):
            raise ValueError(
                "Either 'end' or 'vector' should be passed, but not both.")
        if end is None: end = np.array(start) + np.array(vector)
        facestart = self._convert_units(start)
        faceend = self._convert_units(end)
        color = translate_color(color)

        spec = dict(start=self._list_to_jsvec(facestart),
                    end=self._list_to_jsvec(faceend),
                    radius=radius,
                    color=color,
                    alpha=opacity)

        shape = {
            'type': self.SHAPE_NAMES['ARROW'],
            'start': self._list_to_jsvec(facestart),
            'end': self._list_to_jsvec(faceend),
            'color': color,
            'opacity': opacity,
        }
        shapes = list(self.shapes)
        shapes.append(shape)
        self.shapes = shapes
        return shape

    def remove_all_shapes(self):
        self.shapes = list()

    def remove(self, obj, batch=False):
        if obj in self.shapes:
            shapes = list(self.shapes)
            shapes.remove(obj)
            self.shapes = shapes
        elif obj.type == 'label':
            self.atom_labels_shown = False
        else:
            raise ValueError('Unknown object type %s' % obj['type'])

    # Labels
    def draw_label(self,
                   position,
                   text,
                   background='black',
                   border='black',
                   color='white',
                   fontsize=14,
                   opacity=1.0):
        js_label = JSObject('label')
        position = self._convert_units(position)
        color = translate_color(color)
        background = translate_color(background)
        spec = dict(position=self._list_to_jsvec(position),
                    fontColor=color,
                    backgroundColor=background,
                    borderColor=border,
                    fontSize=fontsize,
                    backgroundOpacity=opacity)
        self.viewer('renderPyLabel', [text, spec, js_label.id])
        return js_label

    def remove_all_labels(self):
        self.viewer('removeAllLabels', [])

    def select_residues(self, residues):
        selected_atom_indices = set()

        for residue in residues:
            for atom in residue.atoms:
                selected_atom_indices.add(atom.index)

        self.selected_atom_indices = selected_atom_indices

    def toggle_residues(self, residues):
        selected_atom_indices = set(self.selected_atom_indices)

        for residue in residues:
            for atom in residue.atoms:
                if atom.index in selected_atom_indices:
                    selected_atom_indices.remove(atom.index)
                else:
                    selected_atom_indices.add(atom.index)

        self.selected_atom_indices = selected_atom_indices

    def get_selected_bonds(self, *args, **kwargs):
        atomIndices = kwargs.get('atomIndices', self.selected_atom_indices)
        bonds = set()

        for bond in self.mol.bonds:
            if bond.a1.index in atomIndices and bond.a2.index in atomIndices:
                bonds.add(bond)

        return bonds

    def get_cubefile(self, orbname, npts, framenum):
        orbital_key = (orbname, npts, framenum)

        if orbital_key not in self._cached_cubefiles:
            grid = self.calc_orb_grid(orbname, npts, framenum)
            cubefile = self._grid_to_cube(grid)
            self._cached_cubefiles[orbital_key] = cubefile
        else:
            cubefile = self._cached_cubefiles[orbital_key]

        return cubefile

    def draw_orbital(self,
                     orbname,
                     npts=50,
                     isoval=0.01,
                     opacity=0.8,
                     negative_color='red',
                     positive_color='blue'):
        """Display a molecular orbital

        Args:
            orbname: name of the orbital (interface dependent)
            npts (int): resolution in each dimension
            isoval (float): isosurface value to draw
            opacity (float): opacity of the orbital (between 0 and 1)
            positive_color (str or int): color of the positive isosurfaces
            negative_color (str or int): color of the negative isosurfaces
        """
        self.orbital_spec = dict(npts=npts,
                                 isoval=isoval,
                                 opacity=opacity,
                                 negative_color=negative_color,
                                 positive_color=positive_color)
        self.current_orbital = orbname

        positive_color = translate_color(positive_color)
        negative_color = translate_color(negative_color)

        orbidx = self.get_orbidx(orbname)
        cubefile = self.get_cubefile(orbidx, npts, self.current_frame)
        self.orbital = {
            'cube_file': cubefile,
            'iso_val': isoval,
            'opacity': opacity
        }

    def get_orbidx(self, orbname):
        try:
            if orbname.lower().strip() == 'homo':
                orbname = self.homo_index()
            elif orbname.lower().strip() == 'lumo':
                orbname = self.homo_index() + 1
        except AttributeError:
            pass
        return orbname

    def get_orbnames(self):
        raise NotImplementedError

    @staticmethod
    def _grid_to_cube(grid, f=None):
        if f is None:
            fobj = StringIO()
        elif not hasattr(f, 'write'):
            fobj = open(f, 'w')
        else:
            fobj = f

        # First two header lines
        print('CUBE File\nGenerated by nbmolviz', file=fobj)
        # third line: number of atoms (0, here) + origin of grid
        print('-1 %f %f %f' % grid.origin(), file=fobj)
        # lines 4-7: number of points in each direction and basis vector for each
        # basis vectors are negative to indicate angstroms
        print('%d %f 0.0 0.0' % (-grid.npoints, grid.dx), file=fobj)
        print('%d 0.0 %f 0.0' % (-grid.npoints, grid.dy), file=fobj)
        print('%d 0.0 0.0 %f' % (-grid.npoints, grid.dz), file=fobj)
        # Next is a line per atom
        # We put just one atom here - it shouldn't be rendered
        print('6 0.000 0.0 0.0 0.0', file=fobj)
        # Next, indicate that there's just one orbital
        print(1, 1, file=fobj)
        # finally, write out all the grid values, six per line
        for ix in range(grid.npoints):
            for iy in range(grid.npoints):
                for iz in range(grid.npoints):
                    print(grid.fxyz[ix, iy, iz], end=' ', file=fobj)
                    if iz % 6 == 5:
                        print(file=fobj)
                print(file=fobj)

        if f is None:
            fobj.seek(0)
            return fobj.getvalue()
        else:
            fobj.close()
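get_styles_for_color is a pure function over the styles dict, so it can be exercised without the widget. A small sketch using a stand-in atom type (real atoms come from moldesign):

from collections import namedtuple

Atom = namedtuple('Atom', 'index')  # stand-in exposing the only attribute used
atoms = [Atom(0), Atom(2)]
old = {'2': {'visualization_type': 'stick'}}
new = MolViz3D.get_styles_for_color('red', atoms, old)
# new == {'2': {'visualization_type': 'stick', 'color': 'red'}, '0': {'color': 'red'}}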
Example #22
class DisplayHook(Configurable):
    """The custom IPython displayhook to replace sys.displayhook.

    This class does many things, but the basic idea is that it is a callable
    that gets called anytime user code returns a value.
    """

    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
                     allow_none=True)
    exec_result = Instance('IPython.core.interactiveshell.ExecutionResult',
                           allow_none=True)
    cull_fraction = Float(0.2)

    def __init__(self, shell=None, cache_size=1000, **kwargs):
        super(DisplayHook, self).__init__(shell=shell, **kwargs)
        cache_size_min = 3
        if cache_size <= 0:
            self.do_full_cache = 0
            cache_size = 0
        elif cache_size < cache_size_min:
            self.do_full_cache = 0
            cache_size = 0
            warn('caching was disabled (min value for cache size is %s).' %
                 cache_size_min,
                 stacklevel=3)
        else:
            self.do_full_cache = 1

        self.cache_size = cache_size

        # we need a reference to the user-level namespace
        self.shell = shell

        self._, self.__, self.___ = '', '', ''

        # these are deliberately global:
        to_user_ns = {'_': self._, '__': self.__, '___': self.___}
        self.shell.user_ns.update(to_user_ns)

    @property
    def prompt_count(self):
        return self.shell.execution_count

    #-------------------------------------------------------------------------
    # Methods used in __call__. Override these methods to modify the behavior
    # of the displayhook.
    #-------------------------------------------------------------------------

    def check_for_underscore(self):
        """Check if the user has set the '_' variable by hand."""
        # If something injected a '_' variable in __builtin__, delete
        # ipython's automatic one so we don't clobber that.  gettext() in
        # particular uses _, so we need to stay away from it.
        if '_' in builtin_mod.__dict__:
            try:
                user_value = self.shell.user_ns['_']
                if user_value is not self._:
                    return
                del self.shell.user_ns['_']
            except KeyError:
                pass

    def quiet(self):
        """Should we silence the display hook because of ';'?"""
        # do not print output if input ends in ';'

        try:
            cell = self.shell.history_manager.input_hist_parsed[-1]
        except IndexError:
            # some uses of ipshellembed may fail here
            return False

        sio = _io.StringIO(cell)
        tokens = list(tokenize.generate_tokens(sio.readline))

        for token in reversed(tokens):
            if token[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE,
                            tokenize.COMMENT):
                continue
            if (token[0] == tokenize.OP) and (token[1] == ';'):
                return True
            else:
                return False

    def start_displayhook(self):
        """Start the displayhook, initializing resources."""
        pass

    def write_output_prompt(self):
        """Write the output prompt.

        The default implementation simply writes the prompt to
        ``sys.stdout``.
        """
        # Use write, not print which adds an extra space.
        sys.stdout.write(self.shell.separate_out)
        outprompt = 'Out[{}]: '.format(self.shell.execution_count)
        if self.do_full_cache:
            sys.stdout.write(outprompt)

    def compute_format_data(self, result):
        """Compute format data of the object to be displayed.

        The format data is a generalization of the :func:`repr` of an object.
        In the default implementation the format data is a :class:`dict` of
        key-value pairs where the keys are valid MIME types and the values
        are JSON'able data structures containing the raw data for that MIME
        type. It is up to frontends to pick a MIME type to use and to
        display that data in an appropriate manner.

        This method only computes the format data for the object and should
        NOT actually print or write that to a stream.

        Parameters
        ----------
        result : object
            The Python object passed to the display hook, whose format will be
            computed.

        Returns
        -------
        (format_dict, md_dict) : tuple of dicts
            format_dict is a :class:`dict` whose keys are valid MIME types and values are
            JSON'able raw data for that MIME type. It is recommended that
            format_dict always include the "text/plain"
            MIME type representation of the object.
            md_dict is a :class:`dict` with the same MIME type keys,
            holding metadata associated with each output.

        """
        return self.shell.display_formatter.format(result)

    # This can be set to True by the write_output_prompt method in a subclass
    prompt_end_newline = False

    def write_format_data(self, format_dict, md_dict=None) -> None:
        """Write the format data dict to the frontend.

        This default version of this method simply writes the plain text
        representation of the object to ``sys.stdout``. Subclasses should
        override this method to send the entire `format_dict` to the
        frontends.

        Parameters
        ----------
        format_dict : dict
            The format dict for the object passed to `sys.displayhook`.
        md_dict : dict (optional)
            The metadata dict to be associated with the display data.
        """
        if 'text/plain' not in format_dict:
            # nothing to do
            return
        # We want to print because we want to always make sure we have a
        # newline, even if all the prompt separators are ''. This is the
        # standard IPython behavior.
        result_repr = format_dict['text/plain']
        if '\n' in result_repr:
            # So that multi-line strings line up with the left column of
            # the screen, instead of having the output prompt mess up
            # their first line.
            # We use the prompt template instead of the expanded prompt
            # because the expansion may add ANSI escapes that will interfere
            # with our ability to determine whether or not we should add
            # a newline.
            if not self.prompt_end_newline:
                # But avoid extraneous empty lines.
                result_repr = '\n' + result_repr

        try:
            print(result_repr)
        except UnicodeEncodeError:
            # If a character is not supported by the terminal encoding replace
            # it with its \u or \x representation
            print(
                result_repr.encode(sys.stdout.encoding,
                                   'backslashreplace').decode(
                                       sys.stdout.encoding))

    def update_user_ns(self, result):
        """Update user_ns with various things like _, __, _1, etc."""

        # Avoid recursive reference when displaying _oh/Out
        if self.cache_size and result is not self.shell.user_ns['_oh']:
            if len(self.shell.user_ns['_oh']
                   ) >= self.cache_size and self.do_full_cache:
                self.cull_cache()

            # Don't overwrite '_' and friends if '_' is in __builtin__
            # (otherwise we cause buggy behavior for things like gettext). and
            # do not overwrite _, __ or ___ if one of these has been assigned
            # by the user.
            update_unders = True
            for unders in ['_' * i for i in range(1, 4)]:
                if unders not in self.shell.user_ns:
                    continue
                if getattr(self, unders) is not self.shell.user_ns.get(unders):
                    update_unders = False

            self.___ = self.__
            self.__ = self._
            self._ = result

            if ('_' not in builtin_mod.__dict__) and (update_unders):
                self.shell.push({
                    '_': self._,
                    '__': self.__,
                    '___': self.___
                },
                                interactive=False)

            # hackish access to the top-level namespace to create _1, _2... dynamically
            to_main = {}
            if self.do_full_cache:
                new_result = '_%s' % self.prompt_count
                to_main[new_result] = result
                self.shell.push(to_main, interactive=False)
                self.shell.user_ns['_oh'][self.prompt_count] = result

    def fill_exec_result(self, result):
        if self.exec_result is not None:
            self.exec_result.result = result

    def log_output(self, format_dict):
        """Log the output."""
        if 'text/plain' not in format_dict:
            # nothing to do
            return
        if self.shell.logger.log_output:
            self.shell.logger.log_write(format_dict['text/plain'], 'output')
        self.shell.history_manager.output_hist_reprs[self.prompt_count] = \
                                                    format_dict['text/plain']

    def finish_displayhook(self):
        """Finish up all displayhook activities."""
        sys.stdout.write(self.shell.separate_out2)
        sys.stdout.flush()

    def __call__(self, result=None):
        """Printing with history cache management.

        This is invoked every time the interpreter needs to print, and is
        activated by setting the variable sys.displayhook to it.
        """
        self.check_for_underscore()
        if result is not None and not self.quiet():
            self.start_displayhook()
            self.write_output_prompt()
            format_dict, md_dict = self.compute_format_data(result)
            self.update_user_ns(result)
            self.fill_exec_result(result)
            if format_dict:
                self.write_format_data(format_dict, md_dict)
                self.log_output(format_dict)
            self.finish_displayhook()

    def cull_cache(self):
        """Output cache is full, cull the oldest entries"""
        oh = self.shell.user_ns.get('_oh', {})
        sz = len(oh)
        cull_count = max(int(sz * self.cull_fraction), 2)
        warn('Output cache limit (currently {sz} entries) hit.\n'
             'Flushing oldest {cull_count} entries.'.format(
                 sz=sz, cull_count=cull_count))

        for i, n in enumerate(sorted(oh)):
            if i >= cull_count:
                break
            self.shell.user_ns.pop('_%i' % n, None)
            oh.pop(n, None)

    def flush(self):
        if not self.do_full_cache:
            raise ValueError("You shouldn't have reached the cache flush "
                             "if full caching is not enabled!")
        # delete auto-generated vars from global namespace

        for n in range(1, self.prompt_count + 1):
            key = '_' + repr(n)
            try:
                del self.shell.user_ns[key]
            except KeyError:
                pass
        # In some embedded circumstances, the user_ns doesn't have the
        # '_oh' key set up.
        oh = self.shell.user_ns.get('_oh', None)
        if oh is not None:
            oh.clear()

        # Release our own references to objects:
        self._, self.__, self.___ = '', '', ''

        if '_' not in builtin_mod.__dict__:
            self.shell.user_ns.update({
                '_': self._,
                '__': self.__,
                '___': self.___
            })
        import gc
        # TODO: Is this really needed?
        # IronPython blocks here forever
        if sys.platform != "cli":
            gc.collect()
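
A quick standalone sketch (no IPython session needed; the names here are mock stand-ins) of the culling policy implemented by cull_cache above: the oldest max(int(size * cull_fraction), 2) entries are dropped from the output history.

# Mock output history keyed by prompt number, as in user_ns['_oh'].
oh = {n: 'result_%d' % n for n in range(1, 11)}
cull_fraction = 0.2
cull_count = max(int(len(oh) * cull_fraction), 2)
for i, n in enumerate(sorted(oh)):
    if i >= cull_count:
        break
    # drop the oldest entries first, mirroring user_ns.pop('_%i' % n)
    oh.pop(n, None)
print(sorted(oh))  # -> [3, 4, 5, 6, 7, 8, 9, 10]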
Exemplo n.º 23
0
class RasterLayer(Layer):
    _view_name = Unicode('LeafletRasterLayerView').tag(sync=True)
    _model_name = Unicode('LeafletRasterLayerModel').tag(sync=True)

    opacity = Float(1.0, min=0.0, max=1.0).tag(sync=True)
    visible = Bool(True).tag(sync=True)
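
A hedged usage sketch: RasterLayer is an ipyleaflet base class, so the example below assumes its TileLayer subclass and a Map to attach it to.

# Assumes ipyleaflet is installed; TileLayer derives from RasterLayer.
from ipyleaflet import Map, TileLayer

m = Map(center=(48.2, 16.4), zoom=10)
layer = TileLayer(opacity=0.6)   # Float trait, bounded to [0.0, 1.0]
layer.visible = False            # Bool trait, synced to the front end
m.add_layer(layer)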
Exemplo n.º 24
0
class Figure(DOMWidget):

    """Main canvas for drawing a chart.

    The Figure object holds the list of Marks and Axes. It also holds an
    optional Interaction object that is responsible for figure-level mouse
    interactions, the "interaction layer".

    Besides, the Figure object has two reference scales, for positioning items
    in an absolute fashion in the figure canvas.

    Attributes
    ----------
    title: string (default: '')
        title of the figure
    axes: List of Axes (default: [])
        list containing the instances of the axes for the figure
    marks: List of Marks (default: [])
        list containing the marks which are to be appended to the figure
    interaction: Interaction or None (default: None)
        optional interaction layer for the figure
    scale_x: Scale
        Scale representing the x values of the figure
    scale_y: Scale
        Scale representing the y values of the figure
    padding_x: Float (default: 0.0)
        Padding to be applied in the horizontal direction of the figure
        around the data points, proportion of the horizontal length
    padding_y: Float (default: 0.025)
        Padding to be applied in the vertical direction of the figure
        around the data points, proportion of the vertical length
    legend_location: {'top-right', 'top', 'top-left', 'left', 'bottom-left', 'bottom', 'bottom-right', 'right'}
        location of the legend relative to the center of the figure
    background_style: Dict (default: {})
        CSS style to be applied to the background of the figure
    title_style: Dict (default: {})
        CSS style to be applied to the title of the figure
    animation_duration: nonnegative int (default: 0)
        Duration of transition on change of data attributes, in milliseconds.

    Layout Attributes
    -----------------
    fig_margin: dict (default: {top=60, bottom=60, left=60, right=60})
        Dictionary containing the top, bottom, left and right margins. The user
        is responsible for making sure that the width and height are greater
        than the sum of the margins.
    min_aspect_ratio: float
         minimum width / height ratio of the figure
    max_aspect_ratio: float
         maximum width / height ratio of the figure

    Methods
    -------

    save_png:
       Saves the figure as a png file

    Note
    ----

    The aspect ratios stand for width / height ratios.

     - If the available space is within bounds in terms of min and max aspect
       ratio, we use the entire available space.
     - If the available space is too oblong horizontally, we use the client
       height and the width that corresponds to max_aspect_ratio (maximize
       width under the constraints).
     - If the available space is too oblong vertically, we use the client width
       and the height that corresponds to min_aspect_ratio (maximize height
       under the constraints).
       This corresponds to maximizing the area under the constraints.

    Default min and max aspect ratio are both equal to 16 / 9.
    """
    title = Unicode().tag(sync=True, display_name='Title')
    axes = List(Instance(Axis)).tag(sync=True, **widget_serialization)
    marks = List(Instance(Mark)).tag(sync=True, **widget_serialization)
    interaction = Instance(Interaction, default_value=None, allow_none=True).tag(sync=True,
                           **widget_serialization)
    scale_x = Instance(Scale).tag(sync=True, **widget_serialization)
    scale_y = Instance(Scale).tag(sync=True, **widget_serialization)
    title_style = Dict(trait=Unicode()).tag(sync=True)
    background_style = Dict().tag(sync=True)

    layout = Instance(Layout, kw={
            'flex': '1',
            'align_self': 'stretch',
            'min_width': '400px'
        }, allow_none=True).tag(sync=True, **widget_serialization)
    min_aspect_ratio = Float(16.0 / 9.0).tag(sync=True)
    max_aspect_ratio = Float(16.0 / 9.0).tag(sync=True)

    fig_margin = Dict(dict(top=60, bottom=60, left=60, right=60)).tag(sync=True)
    padding_x = Float(0.0, min=0.0, max=1.0).tag(sync=True)
    padding_y = Float(0.025, min=0.0, max=1.0).tag(sync=True)
    legend_location = Enum(['top-right', 'top', 'top-left', 'left',
                            'bottom-left', 'bottom', 'bottom-right', 'right'],
                           default_value='top-right').tag(sync=True, display_name='Legend position')
    animation_duration = Int().tag(sync=True, display_name='Animation duration')

    @default('scale_x')
    def _default_scale_x(self):
        return LinearScale(min=0, max=1, allow_padding=False)

    @default('scale_y')
    def _default_scale_y(self):
        return LinearScale(min=0, max=1, allow_padding=False)

    def save_png(self):
        self.send({"type": "save_png"})

    @validate('min_aspect_ratio', 'max_aspect_ratio')
    def _validate_aspect_ratio(self, proposal):
        value = proposal['value']
        if proposal['trait'].name == 'min_aspect_ratio' and value > self.max_aspect_ratio:
            raise TraitError('setting min_aspect_ratio > max_aspect_ratio')
        if proposal['trait'].name == 'max_aspect_ratio' and value < self.min_aspect_ratio:
            raise TraitError('setting max_aspect_ratio < min_aspect_ratio')
        return value

    _view_name = Unicode('Figure').tag(sync=True)
    _model_name = Unicode('FigureModel').tag(sync=True)
    _view_module = Unicode('bqplot').tag(sync=True)
    _model_module = Unicode('bqplot').tag(sync=True)
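
A minimal construction sketch for this Figure class, assuming the usual bqplot companions (LinearScale, Axis, Lines) are importable from the same package.

from bqplot import Figure, LinearScale, Axis, Lines

xs, ys = LinearScale(), LinearScale()
line = Lines(x=[0, 1, 2], y=[1, 4, 2], scales={'x': xs, 'y': ys})
fig = Figure(marks=[line],
             axes=[Axis(scale=xs), Axis(scale=ys, orientation='vertical')],
             title='Demo',
             padding_y=0.025,
             fig_margin=dict(top=60, bottom=60, left=60, right=60))
fig.save_png()  # sends {"type": "save_png"} to the front-end view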
Exemplo n.º 25
0
class ImageSlider(base.DOMWidget):
    """The backend python class for the custom ImageSlider widget.

    This class declares and initializes all of the data that is synced
        between the front- and back-ends of the widget code.
        It also provides the majority of the calculation-based code
        that runs the ImageSlider widget."""

    _view_name = Unicode("ImgSliderView").tag(sync=True)
    _model_name = Unicode("ImgSliderModel").tag(sync=True)

    # public attrs
    height = Integer().tag(sync=True)
    width = Integer().tag(sync=True)

    # index of current image in display
    _img_index = Integer(0).tag(sync=True)
    _N_images = Integer().tag(sync=True)

    _b64value = Unicode().tag(sync=True)
    _err = Unicode().tag(sync=True)
    _format = Unicode("png").tag(sync=True)
    _series_min = Float().tag(sync=True)
    _series_max = Float().tag(sync=True)
    _img_min = Float().tag(sync=True)
    _img_max = Float().tag(sync=True)
    _nrows = Integer().tag(sync=True)
    _ncols = Integer().tag(sync=True)
    _offsetX = Integer().tag(sync=True)
    _offsetY = Integer().tag(sync=True)
    _pix_val = Float().tag(sync=True)

    # These variables were added to support zoom functionality
    _ROI = Tuple((0, 0, 0, 0)).tag(sync=True)  # Xtop, Ytop, Xbottom, Ybottom
    _extrarows = Integer(0).tag(sync=True)
    _extracols = Integer(0).tag(sync=True)
    _nrows_currimg = Integer().tag(sync=True)
    _ncols_currimg = Integer().tag(sync=True)
    _xcoord_absolute = Integer(0).tag(sync=True)
    _ycoord_absolute = Integer(0).tag(sync=True)
    _vslide_reset = Integer(0).tag(sync=True)
    _xcoord_max_roi = Integer().tag(sync=True)
    _ycoord_max_roi = Integer().tag(sync=True)

    def __init__(self, image_series, width, height):
        """Constructor method for setting the necessary member variables
           that are synced between the front- and back-ends.

        Creates the following non-synced member variables:

            *image_series: the list containing the original series of image objects
                 passed by the image_series parameter.
                 This variable is not changed in the code to preserve the original data.
            *current_img: the image object or corresponding numpy array of data
                 that is currently being displayed
            *arr: a numpy array containing the data for the current image
                 that does not contain buffer rows/columns
            *curr_img_data: a numpy array containing the data for the current image,
                 including buffer rows/columns
            *xbuff and ybuff: the number of buffer rows in the previously displayed image

        Parameters:

            *image_series: a list of ImageFile objects (see
                 https://github.com/ornlneutronimaging/iMars3D/blob/master/python/imars3d/ImageFile.py
                 for more details).
                 This list is used to give the widget access
                 to the images that are to be viewed.
            *width: an integer that is used to set the width of the image and UI elements.
            *height: an integer that is used to set the height of the image and UI elements."""

        super(ImageSlider, self).__init__()
        assert len(image_series), "Image series cannot be empty"
        self.image_series = list(image_series)
        self.width = width
        self.height = height
        self._N_images = len(self.image_series)
        self.current_img = self.image_series[self._img_index]
        # image data array; needed to obtain the value at the mouse position
        self.arr = self.current_img.data.copy().astype("float")
        # image data in the <img> tag. This may contain buffers when zoomed, or
        # may be altered by the intensity range limit.
        self.curr_img_data = self.arr.copy()
        self._nrows, self._ncols = self.arr.shape
        self._nrows_currimg, self._ncols_currimg = self.arr.shape
        self._ycoord_max_roi, self._xcoord_max_roi = self.arr.shape
        self.ybuff = 0
        self.xbuff = 0
        self._zoom = False
        self.get_series_minmax()
        self.update_image_div_data(None)
        return

    def get_series_minmax(self, sample_size=10):
        """Determines the absolute minimum and maximum image values of either
               all the images in self.image_series or of
               'sample_size' random images from self.image_series

        Parameters:
            *sample_size: the maximum number of images to use
                 in determining _img_min and _img_max.
                 By default, its value is 10."""

        img_series = list(self.image_series)
        N = len(img_series)
        if N < sample_size:
            data = [img.data for img in img_series]
        else:
            indexes = np.random.choice(N, sample_size, replace=False)
            data = [img_series[i].data for i in indexes]
        self._series_min = self._img_min = float(np.min(data))
        self._series_max = self._img_max = float(np.max(data))
        return

    # This function is called when the values of _offsetX and/or _offsetY change
    @observe("_offsetX", "_offsetY")
    def get_val(self, change):
        """Tries to calculate the value of the image at the mouse position
               and store the result in the member variable _pix_val

        If an error occurs, this method calls the handle_error method
            and stores the result in the member variable _err."""

        try:
            col = int(self._offsetX / self.width * self._ncols_currimg)
            row = int(self._offsetY / self.height * self._nrows_currimg)
            if self._extrarows != 0:
                row = row - self.ybuff
            if self._extracols != 0:
                col = col - self.xbuff
            if col >= self.arr.shape[1]:
                col = self.arr.shape[1] - 1
            if row >= self.arr.shape[0]:
                row = self.arr.shape[0] - 1
            self._pix_val = float(self.arr[row, col])
            self._err = ""
        except Exception:
            self._pix_val = float(np.nan)
            self._err = self.handle_error()
            return

    def getimg_bytes(self):
        """Encodes the data for the currently viewed image into Base64.

        If _img_min and/or _img_max have been changed from their initial values,
            this function will also change the image data to account for
            this change before encoding the data into Base64."""
        # force the intensity range to be positive (i.e. _img_max > _img_min)
        if self._img_min >= self._img_max:
            self._img_max = self._img_min + (self._series_max -
                                             self._series_min) * 1e-5
        # apply intensity range
        self.curr_img_data[self.curr_img_data < self._img_min] = self._img_min
        self.curr_img_data[self.curr_img_data > self._img_max] = self._img_max
        img = ((self.curr_img_data - self._img_min) /
               (self._img_max - self._img_min) * (2**8 - 1)).astype('uint8')
        size = np.max(img.shape)
        view_size = np.max((self.width, self.height))
        # resample if necessary
        resample_ratio = view_size / size
        if resample_ratio != 1.:
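            # Note: scipy.misc.imresize was deprecated in SciPy 1.0 and removed
            # in 1.3; newer environments need an alternative such as PIL resizing.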
            import scipy.misc
            img = scipy.misc.imresize(img, resample_ratio)
        """Allows the correct string IO module to be used
               based on the version of Python.
           Once support for Python 2.7 ends, this if-else statement
               can be replaced by just the contents of the else statement."""
        if sys.version_info < (3, 0):
            from cStringIO import StringIO
            f = StringIO()
        else:
            from io import BytesIO
            f = BytesIO()
        import PIL.Image, base64
        PIL.Image.fromarray(img).save(f, self._format)
        imgb64v = base64.b64encode(f.getvalue())
        return imgb64v

    def handle_error(self):
        """Creates and returns a custom error message if an error occurs in the get_val method."""

        cla, exc, tb = sys.exc_info()
        ex_name = cla.__name__
        try:
            ex_args = exc.__dict__["args"]
        except KeyError:
            ex_args = ("No args", )
        ex_mess = str(ex_name)
        for arg in ex_args:
            ex_mess = ex_mess + str(arg)
        return (ex_mess)

    # This function is called when _img_index changes.
    @observe("_img_index")
    def update_image_index(self, change):
        """
        change the current_img member variable to the new desired image
        and then update the image div
        """
        self.current_img = self.image_series[self._img_index]
        self.update_image_div_data(change)
        return

    @observe("_img_min", "_img_max")
    def update_image_div_data(self, change):
        """update the image div data

        This function is called whenever the image div needs to be updated.
        It could be triggered directly by changes to _img_min and _img_max, but also
        by function calls from handlers of other events such as image-index-change,
        and ROI-change.

        If the zoom is activated (see flag _zoom)
            this function will call the update_image_div_data_with_zoom function to zoom into
            the image and obtain the Base64 encoding.

        Otherwise, this function calls the getimg_bytes method
            to obtain the new Base64 encoding (of either the new or old image)
            and stores this encoding in _b64value."""

        self.arr = self.current_img.data.copy().astype("float")
        self.curr_img_data = self.arr.copy()
        if self._zoom:
            self.update_image_div_data_with_zoom()
            return
        self._nrows, self._ncols = self.arr.shape
        self._ycoord_max_roi, self._xcoord_max_roi = self.arr.shape
        self._nrows_currimg, self._ncols_currimg = self.arr.shape
        self._b64value = self.getimg_bytes()
        return

    # This function is called when _ROI changes.
    @observe("_ROI")
    def zoom_image(self, change):
        """Sets all values necessary for zooming into a Region of Interest
        and then calls the update_image_div_data function."""
        Xtop, Ytop, Xbottom, Ybottom = self._ROI
        if Xtop < 0:  # invalid ROI means reset
            self._zoom = False
            return self.reset_image()
        self._zoom = True
        self.left = int(Xtop / self.width * self._ncols_currimg)
        self.right = int(Xbottom / self.width * self._ncols_currimg)
        self.top = int(Ytop / self.height * self._nrows_currimg)
        self.bottom = int(Ybottom / self.height * self._nrows_currimg)
        self._xcoord_absolute += (self.left - self.xbuff)
        self._ycoord_absolute += (self.top - self.ybuff)
        self.update_image_div_data(change)
        return

    def reset_image(self):
        """Resets all variables that are involved in zooming to their default values.

        After resetting, the update_image_div_data function is called."""
        self._extrarows = 0
        self._extracols = 0
        self.xbuff = 0
        self.ybuff = 0
        self._xcoord_absolute = 0
        self._ycoord_absolute = 0
        self.get_series_minmax()
        self._vslide_reset += 1
        self.update_image_div_data(None)
        return

    def update_image_div_data_with_zoom(self):
        """The function that controlls zooming on a single image.

        It splices the image data based on the left, right, bottom,
            and top variables calculated in the zoom_image function.

        The function then copies the data in ROI and adds
            buffer rows/columns to the copy to insure the data
            used to create the image is a square numpy array.

        Then, the number of extra rows and/or columns is calculated.

        Finally, the zoomed image data is converted to
            a displayable image by calling the getimg_bytes function."""

        select_width = self.right - self.left
        select_height = self.bottom - self.top
        if select_width == 0: select_width = 1
        if select_height == 0: select_height = 1
        self.arr = self.arr[self._ycoord_absolute:(self._ycoord_absolute +
                                                   select_height),
                            self._xcoord_absolute:(self._xcoord_absolute +
                                                   select_width)]
        self._nrows, self._ncols = self.arr.shape
        self.curr_img_data = self.arr.copy()
        # calculate paddings
        if self._ncols > self._nrows:
            diff = self._ncols - self._nrows
            if diff % 2 == 0:
                addtop = diff // 2
                addbottom = diff // 2
            else:
                addtop = diff // 2 + 1
                addbottom = diff // 2
            self.xbuff = 0
            self.ybuff = addtop
            self._nrows_currimg = self._ncols
            self._ncols_currimg = self._ncols
            self._extrarows = diff
            self._extracols = 0
            extrarows_top = np.full((addtop, self._ncols), 1)
            extrarows_bottom = np.full((addbottom, self._ncols), 1)
            self.curr_img_data = np.vstack(
                (extrarows_top, self.curr_img_data, extrarows_bottom))
        else:
            diff = self._nrows - self._ncols
            if diff % 2 == 0:
                addleft = diff // 2
                addright = diff // 2
            else:
                addleft = diff // 2 + 1
                addright = diff // 2
            self.xbuff = addleft
            self.ybuff = 0
            self._nrows_currimg = self._nrows
            self._ncols_currimg = self._nrows
            self._extrarows = 0
            self._extracols = diff
            extrarows_left = np.full((self._nrows, addleft), 1)
            extrarows_right = np.full((self._nrows, addright), 1)
            self.curr_img_data = np.hstack(
                (extrarows_left, self.curr_img_data, extrarows_right))
        self._xcoord_max_roi = self._xcoord_absolute + self._ncols_currimg - self._extracols
        self._ycoord_max_roi = self._ycoord_absolute + self._nrows_currimg - self._extrarows
        self._b64value = self.getimg_bytes()
        return
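
A hedged usage sketch: the widget only needs objects exposing a 2-D data array, so a tiny stand-in for imars3d's ImageFile is used below.

import numpy as np

class FakeImage(object):  # hypothetical stand-in, not part of the widget
    def __init__(self, data):
        self.data = data

series = [FakeImage(np.random.rand(64, 64)) for _ in range(5)]
slider = ImageSlider(image_series=series, width=400, height=400)
# _img_index, _img_min, _img_max, _ROI, etc. now sync with the front end.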
Exemplo n.º 26
0
class ClingKernel(Kernel):
    """Cling Kernel for Jupyter"""
    implementation = 'cling_kernel'
    implementation_version = __version__
    language_version = 'X'

    banner = Unicode()

    def _banner_default(self):
        return 'cling-%s' % self.language_version

    language_info = {
        'name': 'c++',
        'codemirror_mode': 'c++',
        'mimetype': 'text/x-c++src',
        'file_extension': '.c++'
    }

    # Used in handle_input()
    flush_interval = Float(0.25, config=True)

    std = CaselessStrEnum(
        default_value='c++11',
        values=['c++11', 'c++14', 'c++1z', 'c++17'],
        help="C++ standard to use, either c++17, c++1z, c++14 or c++11").tag(
            config=True)

    def __init__(self, **kwargs):
        super(ClingKernel, self).__init__(**kwargs)
        try:
            whichCling = os.readlink(shutil.which('cling'))
        except OSError as e:
            #If cling is not a symlink try a regular file
            #readlink returns POSIX error EINVAL (22) if the
            #argument is not a symlink
            if e.args[0] == 22:
                whichCling = shutil.which('cling')
            else:
                raise e
        except AttributeError:
            from distutils.spawn import find_executable
            whichCling = find_executable('cling')

        if whichCling:
            clingInstDir = os.path.dirname(os.path.dirname(whichCling))
            llvmResourceDir = clingInstDir
        else:
            raise RuntimeError('Cannot find cling in $PATH. No cling, no fun.')

        for ext in ['so', 'dylib', 'dll']:
            libFilename = clingInstDir + "/lib/libclingJupyter." + ext
            if os.access(libFilename, os.R_OK):
                self.libclingJupyter = ctypes.CDLL(libFilename,
                                                   mode=ctypes.RTLD_GLOBAL)
                break

        if not getattr(self, 'libclingJupyter', None):
            raise RuntimeError('Cannot find ' + clingInstDir +
                               '/lib/libclingJupyter.{so,dylib,dll}')

        self.libclingJupyter.cling_create.restype = my_void_p
        self.libclingJupyter.cling_eval.restype = my_void_p
        # build the -std=<standard> option, e.g. -std=c++11 or -std=c++17
        stdopt = ("-std=" + self.std).encode('utf-8')
        self.log.info("Using {}".format(stdopt.decode('utf-8')))
        #from IPython.utils import io
        #io.rprint("DBG: Using {}".format(stdopt.decode('utf-8')))
        strarr = ctypes.c_char_p * 5
        argv = strarr(b"clingJupyter", stdopt,
                      b"-I" + clingInstDir.encode('utf-8') + b"/include/", b"",
                      b"")
        llvmResourceDirCP = ctypes.c_char_p(llvmResourceDir.encode('utf8'))

        # The sideband_pipe is used by cling::Jupyter::pushOutput() to publish MIME data to Jupyter.
        self.sideband_pipe, pipe_in = os.pipe()
        self.interp = self.libclingJupyter.cling_create(
            5, argv, llvmResourceDirCP, pipe_in)

        self.libclingJupyter.cling_complete_start.restype = my_void_p
        self.libclingJupyter.cling_complete_next.restype = my_void_p  #c_char_p

    def _process_stdio_data(self, pipe, name):
        """Read from the pipe, send it to IOPub as name stream."""
        data = os.read(pipe, 1024)
        # send output
        self.session.send(self.iopub_socket,
                          'stream', {
                              'name': name,
                              'text': data.decode('utf8', 'replace'),
                          },
                          parent=self._parent_header)

    def _recv_dict(self, pipe):
        """Receive a serialized dict on a pipe

        Returns the dictionary.
        """
        # Wire format:
        #   // Pipe sees (all numbers are longs, except for the first):
        #   // - num bytes in a long (sent as a single unsigned char!)
        #   // - num elements of the MIME dictionary; Jupyter selects one to display.
        #   // For each MIME dictionary element:
        #   //   - length of MIME type key
        #   //   - MIME type key
        #   //   - size of MIME data buffer (including the terminating 0 for
        #   //     0-terminated strings)
        #   //   - MIME data buffer
        data = {}
        b1 = os.read(pipe, 1)
        sizeof_long = struct.unpack('B', b1)[0]
        if sizeof_long == 8:
            fmt = 'Q'
        else:
            fmt = 'L'
        buf = os.read(pipe, sizeof_long)
        num_elements = struct.unpack(fmt, buf)[0]
        for i in range(num_elements):
            buf = os.read(pipe, sizeof_long)
            len_key = struct.unpack(fmt, buf)[0]
            key = os.read(pipe, len_key).decode('utf8')
            buf = os.read(pipe, sizeof_long)
            len_value = struct.unpack(fmt, buf)[0]
            value = os.read(pipe, len_value).decode('utf8')
            data[key] = value
        return data

    def _process_sideband_data(self):
        """publish display-data messages on IOPub.
        """
        data = self._recv_dict(self.sideband_pipe)
        self.session.send(
            self.iopub_socket,
            'display_data',
            content={
                'data': data,
                'metadata': {},
            },
            parent=self._parent_header,
        )

    def forward_streams(self):
        """Put the forwarding pipes in place for stdout, stderr."""
        self.replaced_streams = [FdReplacer("stdout"), FdReplacer("stderr")]

    def handle_input(self):
        """Capture stdout, stderr and sideband. Forward them as stream messages."""
        # create pipe for stdout, stderr
        select_on = [self.sideband_pipe]
        for rs in self.replaced_streams:
            if rs:
                select_on.append(rs.pipe_out)

        r, w, x = select.select(select_on, [], [], self.flush_interval)
        if not r:
            # nothing to read; flush libc's stdout/stderr so buffered output
            # reaches the pipes before the next call
            libc.fflush(c_stdout_p)
            libc.fflush(c_stderr_p)
            return False

        for fd in r:
            if fd == self.sideband_pipe:
                self._process_sideband_data()
            else:
                if fd == self.replaced_streams[0].pipe_out:
                    rs = 0
                else:
                    rs = 1
                self._process_stdio_data(fd, self.replaced_streams[rs].name)
        return True

    def close_forwards(self):
        """Close the forwarding pipes."""
        libc.fflush(c_stdout_p)
        libc.fflush(c_stderr_p)
        for rs in self.replaced_streams:
            rs.restore()
        self.replaced_streams = []

    def run_cell(self, code, silent=False):
        """Run code in cling, storing the expression result or an empty string if there is none."""
        self.stringResult = self.libclingJupyter.cling_eval(
            self.interp, ctypes.c_char_p(code.encode('utf8')))

    def do_execute(self,
                   code,
                   silent,
                   store_history=True,
                   user_expressions=None,
                   allow_stdin=False):
        """Runs code in cling and handles input; returns the evaluation result."""
        if not code.strip():
            return {
                'status': 'ok',
                'execution_count': self.execution_count,
                'payload': [],
                'user_expressions': {},
            }

        # Redirect stdout, stderr so handle_input() can pick it up.
        self.forward_streams()

        # Run code in cling in a thread.
        run_cell_thread = threading.Thread(target=self.run_cell,
                                           args=(
                                               code,
                                               silent,
                                           ))
        run_cell_thread.start()
        while True:
            self.handle_input()
            if not run_cell_thread.is_alive():
                # self.run_cell() has returned.
                break

        run_cell_thread.join()

        # Any leftovers?
        while self.handle_input():
            pass

        self.close_forwards()
        status = 'ok'
        if not self.stringResult:
            status = 'error'
        else:
            # Execution has finished; we have a result.
            self.session.send(
                self.iopub_socket,
                'execute_result',
                content={
                    'data': {
                        'text/plain':
                        ctypes.cast(self.stringResult,
                                    ctypes.c_char_p).value.decode(
                                        'utf8', 'replace'),
                    },
                    'metadata': {},
                    'execution_count': self.execution_count,
                },
                parent=self._parent_header)
            self.libclingJupyter.cling_eval_free(self.stringResult)

        reply = {
            'status': status,
            'execution_count': self.execution_count,
        }

        if status == 'error':
            err = {
                'ename': 'ename',
                'evalue': 'evalue',
                'traceback': [],
            }
            self.send_response(self.iopub_socket, 'error', err)
            reply.update(err)
        elif status == 'ok':
            reply.update({
                'THIS DOES NOT WORK: payload': [{
                    'source': 'set_next_input',
                    'replace': True,
                    'text': '//THIS IS MAGIC\n' + code
                }],
                'user_expressions': {},
            })
        else:
            raise ValueError("Invalid status: %r" % status)

        return reply

    def do_complete(self, code, cursor_pos):
        """Provide completions here"""
        # if cursor_pos = cursor_start = cursor_end,
        # matches should be a list of strings to be appended after the cursor
        return {
            'matches': [],
            'cursor_end': cursor_pos,
            'cursor_start': cursor_pos,
            'metadata': {},
            'status': 'ok'
        }
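
A writer-side sketch of the sideband wire format that _recv_dict above parses, assuming 8-byte longs (the 'Q' struct format); the function name is illustrative, not part of the kernel.

import struct

def pack_mime_dict(data, fmt='Q', sizeof_long=8):
    # sizeof(long) is sent first, as a single unsigned char
    out = struct.pack('B', sizeof_long)
    # then the number of MIME dictionary elements
    out += struct.pack(fmt, len(data))
    for key, value in data.items():
        k = key.encode('utf8')
        v = value.encode('utf8') + b'\0'  # size includes the terminating 0
        out += struct.pack(fmt, len(k)) + k  # key length, key
        out += struct.pack(fmt, len(v)) + v  # value length, value
    return out

payload = pack_mime_dict({'text/plain': 'hello from cling'})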
Exemplo n.º 27
0
class GatewayClient(SingletonConfigurable):
    """This class manages the configuration.  It's its own singleton class so
    that we can share these values across all objects.  It also contains some
    helper methods to build request arguments out of the various config
    options.
    """

    url = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help="""The url of the Kernel or Enterprise Gateway server where
        kernel specifications are defined and kernel management takes place.
        If defined, this Notebook server acts as a proxy for all kernel
        management and kernel specification retrieval.  (JUPYTER_GATEWAY_URL env var)
        """)

    url_env = 'JUPYTER_GATEWAY_URL'

    @default('url')
    def _url_default(self):
        return os.environ.get(self.url_env)

    @validate('url')
    def _url_validate(self, proposal):
        value = proposal['value']
        # Ensure value, if present, starts with 'http'
        if value is not None and len(value) > 0:
            if not str(value).lower().startswith('http'):
                raise TraitError(
                    "GatewayClient url must start with 'http': '%r'" % value)
        return value

    ws_url = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The websocket url of the Kernel or Enterprise Gateway server.  If not provided, this value
        will correspond to the value of the Gateway url with 'ws' in place of 'http'.  (JUPYTER_GATEWAY_WS_URL env var)
        """)

    ws_url_env = 'JUPYTER_GATEWAY_WS_URL'

    @default('ws_url')
    def _ws_url_default(self):
        default_value = os.environ.get(self.ws_url_env)
        if default_value is None:
            if self.gateway_enabled:
                default_value = self.url.lower().replace('http', 'ws')
        return default_value

    @validate('ws_url')
    def _ws_url_validate(self, proposal):
        value = proposal['value']
        # Ensure value, if present, starts with 'ws'
        if value is not None and len(value) > 0:
            if not str(value).lower().startswith('ws'):
                raise TraitError(
                    "GatewayClient ws_url must start with 'ws': '%r'" % value)
        return value

    kernels_endpoint_default_value = '/api/kernels'
    kernels_endpoint_env = 'JUPYTER_GATEWAY_KERNELS_ENDPOINT'
    kernels_endpoint = Unicode(
        default_value=kernels_endpoint_default_value,
        config=True,
        help=
        """The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)"""
    )

    @default('kernels_endpoint')
    def _kernels_endpoint_default(self):
        return os.environ.get(self.kernels_endpoint_env,
                              self.kernels_endpoint_default_value)

    kernelspecs_endpoint_default_value = '/api/kernelspecs'
    kernelspecs_endpoint_env = 'JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT'
    kernelspecs_endpoint = Unicode(
        default_value=kernelspecs_endpoint_default_value,
        config=True,
        help=
        """The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)"""
    )

    @default('kernelspecs_endpoint')
    def _kernelspecs_endpoint_default(self):
        return os.environ.get(self.kernelspecs_endpoint_env,
                              self.kernelspecs_endpoint_default_value)

    kernelspecs_resource_endpoint_default_value = '/kernelspecs'
    kernelspecs_resource_endpoint_env = 'JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT'
    kernelspecs_resource_endpoint = Unicode(
        default_value=kernelspecs_resource_endpoint_default_value,
        config=True,
        help="""The gateway endpoint for accessing kernelspecs resources
            (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""")

    @default('kernelspecs_resource_endpoint')
    def _kernelspecs_resource_endpoint_default(self):
        return os.environ.get(self.kernelspecs_resource_endpoint_env,
                              self.kernelspecs_resource_endpoint_default_value)

    connect_timeout_default_value = 40.0
    connect_timeout_env = 'JUPYTER_GATEWAY_CONNECT_TIMEOUT'
    connect_timeout = Float(
        default_value=connect_timeout_default_value,
        config=True,
        help=
        """The time allowed for HTTP connection establishment with the Gateway server.
        (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""")

    @default('connect_timeout')
    def connect_timeout_default(self):
        return float(
            os.environ.get('JUPYTER_GATEWAY_CONNECT_TIMEOUT',
                           self.connect_timeout_default_value))

    request_timeout_default_value = 40.0
    request_timeout_env = 'JUPYTER_GATEWAY_REQUEST_TIMEOUT'
    request_timeout = Float(
        default_value=request_timeout_default_value,
        config=True,
        help=
        """The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT env var)"""
    )

    @default('request_timeout')
    def request_timeout_default(self):
        return float(
            os.environ.get('JUPYTER_GATEWAY_REQUEST_TIMEOUT',
                           self.request_timeout_default_value))

    client_key = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The filename for client SSL key, if any.  (JUPYTER_GATEWAY_CLIENT_KEY env var)
        """)
    client_key_env = 'JUPYTER_GATEWAY_CLIENT_KEY'

    @default('client_key')
    def _client_key_default(self):
        return os.environ.get(self.client_key_env)

    client_cert = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The filename for client SSL certificate, if any.  (JUPYTER_GATEWAY_CLIENT_CERT env var)
        """)
    client_cert_env = 'JUPYTER_GATEWAY_CLIENT_CERT'

    @default('client_cert')
    def _client_cert_default(self):
        return os.environ.get(self.client_cert_env)

    ca_certs = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The filename of CA certificates or None to use defaults.  (JUPYTER_GATEWAY_CA_CERTS env var)
        """)
    ca_certs_env = 'JUPYTER_GATEWAY_CA_CERTS'

    @default('ca_certs')
    def _ca_certs_default(self):
        return os.environ.get(self.ca_certs_env)

    http_user = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var)
        """)
    http_user_env = 'JUPYTER_GATEWAY_HTTP_USER'

    @default('http_user')
    def _http_user_default(self):
        return os.environ.get(self.http_user_env)

    http_pwd = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The password for HTTP authentication.  (JUPYTER_GATEWAY_HTTP_PWD env var)
        """)
    http_pwd_env = 'JUPYTER_GATEWAY_HTTP_PWD'

    @default('http_pwd')
    def _http_pwd_default(self):
        return os.environ.get(self.http_pwd_env)

    headers_default_value = '{}'
    headers_env = 'JUPYTER_GATEWAY_HEADERS'
    headers = Unicode(
        default_value=headers_default_value,
        allow_none=True,
        config=True,
        help=
        """Additional HTTP headers to pass on the request.  This value will be converted to a dict.
          (JUPYTER_GATEWAY_HEADERS env var)
        """)

    @default('headers')
    def _headers_default(self):
        return os.environ.get(self.headers_env, self.headers_default_value)

    auth_token = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The authorization token used in the HTTP headers. The header will be formatted as:

            {
                'Authorization': '{auth_scheme} {auth_token}'
            }

        (JUPYTER_GATEWAY_AUTH_TOKEN env var)""")
    auth_token_env = 'JUPYTER_GATEWAY_AUTH_TOKEN'

    @default('auth_token')
    def _auth_token_default(self):
        return os.environ.get(self.auth_token_env, '')

    auth_scheme = Unicode(
        default_value=None,
        allow_none=True,
        config=True,
        help=
        """The auth scheme, added as a prefix to the authorization token used in the HTTP headers.
        (JUPYTER_GATEWAY_AUTH_SCHEME env var)""")
    auth_scheme_env = 'JUPYTER_GATEWAY_AUTH_SCHEME'

    @default('auth_scheme')
    def _auth_scheme_default(self):
        return os.environ.get(self.auth_scheme_env, 'token')

    validate_cert_default_value = True
    validate_cert_env = 'JUPYTER_GATEWAY_VALIDATE_CERT'
    validate_cert = Bool(
        default_value=validate_cert_default_value,
        config=True,
        help=
        """For HTTPS requests, determines if server's certificate should be validated or not.
        (JUPYTER_GATEWAY_VALIDATE_CERT env var)""")

    @default('validate_cert')
    def validate_cert_default(self):
        return bool(
            os.environ.get(self.validate_cert_env,
                           str(self.validate_cert_default_value)) not in
            ['no', 'false'])

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._static_args = {}  # initialized on first use

    env_whitelist_default_value = ''
    env_whitelist_env = 'JUPYTER_GATEWAY_ENV_WHITELIST'
    env_whitelist = Unicode(
        default_value=env_whitelist_default_value,
        config=True,
        help=
        """A comma-separated list of environment variable names that will be included, along with
         their values, in the kernel startup request.  The corresponding `env_whitelist` configuration
         value must also be set on the Gateway server - since that configuration value indicates which
         environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)"""
    )

    @default('env_whitelist')
    def _env_whitelist_default(self):
        return os.environ.get(self.env_whitelist_env,
                              self.env_whitelist_default_value)

    gateway_retry_interval_default_value = 1.0
    gateway_retry_interval_env = 'JUPYTER_GATEWAY_RETRY_INTERVAL'
    gateway_retry_interval = Float(
        default_value=gateway_retry_interval_default_value,
        config=True,
        help=
        """The time allowed for HTTP reconnection with the Gateway server for the first time.
            Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor of numbers of retries
            but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX.
            (JUPYTER_GATEWAY_RETRY_INTERVAL env var)""")

    @default('gateway_retry_interval')
    def gateway_retry_interval_default(self):
        return float(
            os.environ.get('JUPYTER_GATEWAY_RETRY_INTERVAL',
                           self.gateway_retry_interval_default_value))

    gateway_retry_interval_max_default_value = 30.0
    gateway_retry_interval_max_env = 'JUPYTER_GATEWAY_RETRY_INTERVAL_MAX'
    gateway_retry_interval_max = Float(
        default_value=gateway_retry_interval_max_default_value,
        config=True,
        help=
        """The maximum time allowed for HTTP reconnection retry with the Gateway server.
            (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""")

    @default('gateway_retry_interval_max')
    def gateway_retry_interval_max_default(self):
        return float(
            os.environ.get('JUPYTER_GATEWAY_RETRY_INTERVAL_MAX',
                           self.gateway_retry_interval_max_default_value))

    gateway_retry_max_default_value = 5
    gateway_retry_max_env = 'JUPYTER_GATEWAY_RETRY_MAX'
    gateway_retry_max = Int(
        default_value=gateway_retry_max_default_value,
        config=True,
        help=
        """The maximum retries allowed for HTTP reconnection with the Gateway server.
            (JUPYTER_GATEWAY_RETRY_MAX env var)""")

    @default('gateway_retry_max')
    def gateway_retry_max_default(self):
        return int(
            os.environ.get('JUPYTER_GATEWAY_RETRY_MAX',
                           self.gateway_retry_max_default_value))

    @property
    def gateway_enabled(self):
        return bool(self.url is not None and len(self.url) > 0)

    # Ensure KERNEL_LAUNCH_TIMEOUT has a default value.
    KERNEL_LAUNCH_TIMEOUT = int(os.environ.get('KERNEL_LAUNCH_TIMEOUT', 40))

    def init_static_args(self):
        """Initialize arguments used on every request.  Since these are static values, we'll
        perform this operation once.

        """
        # Ensure that request timeout and KERNEL_LAUNCH_TIMEOUT are the same, taking the
        #  greater value of the two.
        if self.request_timeout < float(GatewayClient.KERNEL_LAUNCH_TIMEOUT):
            self.request_timeout = float(GatewayClient.KERNEL_LAUNCH_TIMEOUT)
        elif self.request_timeout > float(GatewayClient.KERNEL_LAUNCH_TIMEOUT):
            GatewayClient.KERNEL_LAUNCH_TIMEOUT = int(self.request_timeout)
        # Ensure any adjustments are reflected in env.
        os.environ['KERNEL_LAUNCH_TIMEOUT'] = str(
            GatewayClient.KERNEL_LAUNCH_TIMEOUT)

        self._static_args['headers'] = json.loads(self.headers)
        if 'Authorization' not in self._static_args['headers'].keys():
            self._static_args['headers'].update({
                'Authorization':
                '{} {}'.format(self.auth_scheme, self.auth_token)
            })
        self._static_args['connect_timeout'] = self.connect_timeout
        self._static_args['request_timeout'] = self.request_timeout
        self._static_args['validate_cert'] = self.validate_cert
        if self.client_cert:
            self._static_args['client_cert'] = self.client_cert
            self._static_args['client_key'] = self.client_key
            if self.ca_certs:
                self._static_args['ca_certs'] = self.ca_certs
        if self.http_user:
            self._static_args['auth_username'] = self.http_user
        if self.http_pwd:
            self._static_args['auth_password'] = self.http_pwd

    def load_connection_args(self, **kwargs):
        """Merges the static args relative to the connection, with the given keyword arguments.  If statics
         have yet to be initialized, we'll do that here.

        """
        if len(self._static_args) == 0:
            self.init_static_args()

        kwargs.update(self._static_args)
        return kwargs
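
A hedged usage sketch: the singleton is configured through its documented env vars (the gateway host below is hypothetical), then asked for per-request arguments.

import os

os.environ['JUPYTER_GATEWAY_URL'] = 'http://gateway.example.com:8888'
os.environ['JUPYTER_GATEWAY_REQUEST_TIMEOUT'] = '60.0'

gw = GatewayClient.instance()  # SingletonConfigurable accessor
if gw.gateway_enabled:
    # headers (incl. Authorization), timeouts, and cert options merged in
    request_args = gw.load_connection_args(method='GET')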
Exemplo n.º 28
0
class Spawner(LoggingConfigurable):
    """Base class for spawning single-user notebook servers.
    
    Subclass this, and override the following methods:
    
    - load_state
    - get_state
    - start
    - stop
    - poll
    """
    
    db = Any()
    user = Any()
    hub = Any()
    authenticator = Any()
    api_token = Unicode()
    ip = Unicode('127.0.0.1',
        help="The IP address (or hostname) the single-user server should listen on"
    ).tag(config=True)
    start_timeout = Integer(60,
        help="""Timeout (in seconds) before giving up on the spawner.
        
        This is the timeout for start to return, not the timeout for the server to respond.
        Callers of spawner.start will assume that startup has failed if it takes longer than this.
        start should return when the server process is started and its location is known.
        """
    ).tag(config=True)

    http_timeout = Integer(30,
        help="""Timeout (in seconds) before giving up on a spawned HTTP server

        Once a server has successfully been spawned, this is the amount of time
        we wait before assuming that the server is unable to accept
        connections.
        """
    ).tag(config=True)

    poll_interval = Integer(30,
        help="""Interval (in seconds) on which to poll the spawner."""
    ).tag(config=True)
    _callbacks = List()
    _poll_callback = Any()
    
    debug = Bool(False,
        help="Enable debug-logging of the single-user server"
    ).tag(config=True)
    
    options_form = Unicode("", help="""
        An HTML form for options a user can specify on launching their server.
        The surrounding `<form>` element and the submit button are already provided.
        
        For example:
        
            Set your key:
            <input name="key" val="default_key"></input>
            <br>
            Choose a letter:
            <select name="letter" multiple="true">
              <option value="A">The letter A</option>
              <option value="B">The letter B</option>
            </select>
    """).tag(config=True)

    def options_from_form(self, form_data):
        """Interpret HTTP form data
        
        Form data will always arrive as a dict of lists of strings.
        Override this function to understand single-values, numbers, etc.
        
        This should coerce form data into the structure expected by self.user_options,
        which must be a dict.
        
        Instances will receive this data on self.user_options, after passing through this function,
        prior to `Spawner.start`.
        """
        return form_data
    
    user_options = Dict(help="This is where form-specified options ultimately end up.")
    
    env_keep = List([
        'PATH',
        'PYTHONPATH',
        'CONDA_ROOT',
        'CONDA_DEFAULT_ENV',
        'VIRTUAL_ENV',
        'LANG',
        'LC_ALL',
    ],
        help="Whitelist of environment variables for the subprocess to inherit"
    ).tag(config=True)
    env = Dict(help="""Deprecated: use Spawner.get_env or Spawner.environment
    
    - extend Spawner.get_env for adding required env in Spawner subclasses
    - Spawner.environment for config-specified env
    """)
    
    environment = Dict(
        help="""Environment variables to load for the Spawner.

        Value could be a string or a callable. If it is a callable, it will
        be called with one parameter, which will be the instance of the spawner
        in use. It should return quickly (without performing blocking
        operations) a string to be used as the value of the environment variable.
        """
    ).tag(config=True)
    
    cmd = Command(['jupyterhub-singleuser'],
        help="""The command used for starting notebooks."""
    ).tag(config=True)
    args = List(Unicode(),
        help="""Extra arguments to be passed to the single-user server"""
    ).tag(config=True)
    
    notebook_dir = Unicode('',
        help="""The notebook directory for the single-user server
        
        `~` will be expanded to the user's home directory
        `%U` will be expanded to the user's username
        """
    ).tag(config=True)

    default_url = Unicode('',
        help="""The default URL for the single-user server. 

        Can be used in conjunction with --notebook-dir=/ to enable 
        full filesystem traversal, while preserving user's homedir as
        landing page for notebook

        `%U` will be expanded to the user's username
        """
    ).tag(config=True)
    
    disable_user_config = Bool(False,
        help="""Disable per-user configuration of single-user servers.
        
        This prevents any config in users' $HOME directories
        from having an effect on their server.
        """
    ).tag(config=True)
    
    def __init__(self, **kwargs):
        super(Spawner, self).__init__(**kwargs)
        if self.user.state:
            self.load_state(self.user.state)
    
    def load_state(self, state):
        """load state from the database
        
        This is the extensible part of state
        
        Override in a subclass if there is state to load.
        Should call `super`.
        
        See Also
        --------
        
        get_state, clear_state
        """
        pass
    
    def get_state(self):
        """store the state necessary for load_state
        
        A black box of extra state for custom spawners.
        Subclasses should call `super`.
        
        Returns
        -------
        
        state: dict
             a JSONable dict of state
        """
        state = {}
        return state
    
    def clear_state(self):
        """clear any state that should be cleared when the process stops
        
        State that should be preserved across server instances should not be cleared.
        
        Subclasses should call super, to ensure that state is properly cleared.
        """
        self.api_token = ''
    
    def get_env(self):
        """Return the environment dict to use for the Spawner.

        This applies things like `env_keep`, anything defined in `Spawner.environment`,
        and adds the API token to the env.

        Use this to access the env in Spawner.start to allow extension in subclasses.
        """
        env = {}
        if self.env:
            warnings.warn("Spawner.env is deprecated, found %s" % self.env, DeprecationWarning)
            env.update(self.env)
        
        for key in self.env_keep:
            if key in os.environ:
                env[key] = os.environ[key]

        # config overrides. If the value is a callable, it will be called with
        # one parameter - the current spawner instance - and the return value
        # will be assigned to the environment variable. This will be called at
        # spawn time.
        for key, value in self.environment.items():
            if callable(value):
                env[key] = value(self)
            else:
                env[key] = value

        env['JPY_API_TOKEN'] = self.api_token
        return env
    
    def get_args(self):
        """Return the arguments to be passed after self.cmd"""
        args = [
            '--user=%s' % self.user.name,
            '--port=%i' % self.user.server.port,
            '--cookie-name=%s' % self.user.server.cookie_name,
            '--base-url=%s' % self.user.server.base_url,
            '--hub-host=%s' % self.hub.host,
            '--hub-prefix=%s' % self.hub.server.base_url,
            '--hub-api-url=%s' % self.hub.api_url,
            ]
        if self.ip:
            args.append('--ip=%s' % self.ip)
        if self.notebook_dir:
            self.notebook_dir = self.notebook_dir.replace("%U", self.user.name)
            args.append('--notebook-dir=%s' % self.notebook_dir)
        if self.default_url:
            self.default_url = self.default_url.replace("%U", self.user.name)
            args.append('--NotebookApp.default_url=%s' % self.default_url)

        if self.debug:
            args.append('--debug')
        if self.disable_user_config:
            args.append('--disable-user-config')
        args.extend(self.args)
        return args
    
    @gen.coroutine
    def start(self):
        """Start the single-user process"""
        raise NotImplementedError("Override in subclass. Must be a Tornado gen.coroutine.")
    
    @gen.coroutine
    def stop(self, now=False):
        """Stop the single-user process"""
        raise NotImplementedError("Override in subclass. Must be a Tornado gen.coroutine.")
    
    @gen.coroutine
    def poll(self):
        """Check if the single-user process is running

        return None if it is, an exit status (0 if unknown) if it is not.
        """
        raise NotImplementedError("Override in subclass. Must be a Tornado gen.coroutine.")
    
    def add_poll_callback(self, callback, *args, **kwargs):
        """add a callback to fire when the subprocess stops
        
        as noticed by periodic poll_and_notify()
        """
        if args or kwargs:
            cb = callback
            callback = lambda: cb(*args, **kwargs)
        self._callbacks.append(callback)
    
    def stop_polling(self):
        """stop the periodic poll"""
        if self._poll_callback:
            self._poll_callback.stop()
            self._poll_callback = None
        
    def start_polling(self):
        """Start polling periodically
        
        callbacks registered via `add_poll_callback` will fire
        if/when the process stops.
        
        Explicit termination via the stop method will not trigger the callbacks.
        """
        if self.poll_interval <= 0:
            self.log.debug("Not polling subprocess")
            return
        else:
            self.log.debug("Polling subprocess every %is", self.poll_interval)
        
        self.stop_polling()
        
        self._poll_callback = PeriodicCallback(
            self.poll_and_notify,
            1e3 * self.poll_interval
        )
        self._poll_callback.start()

    @gen.coroutine
    def poll_and_notify(self):
        """Used as a callback to periodically poll the process,
        and notify any watchers
        """
        status = yield self.poll()
        if status is None:
            # still running, nothing to do here
            return
        
        self.stop_polling()
        
        add_callback = IOLoop.current().add_callback
        for callback in self._callbacks:
            add_callback(callback)
    
    death_interval = Float(0.1)
    @gen.coroutine
    def wait_for_death(self, timeout=10):
        """wait for the process to die, up to timeout seconds"""
        for i in range(int(timeout / self.death_interval)):
            status = yield self.poll()
            if status is not None:
                break
            else:
                yield gen.sleep(self.death_interval)
Example #29
class KernelRestarterBase(LoggingConfigurable):
    """Monitor and autorestart a kernel."""

    debug = Bool(
        False,
        config=True,
        help="""Whether to include every poll event in debugging output.

        Has to be set explicitly, because there will be *a lot* of output.
        """)

    time_to_dead = Float(3.0,
                         config=True,
                         help="""Kernel heartbeat interval in seconds.""")

    restart_limit = Integer(
        5,
        config=True,
        help="""The number of consecutive autorestarts before the kernel is presumed dead.""")

    _restarting = False
    _restart_count = 0

    def __init__(self, kernel_manager, kernel_type, kernel_finder=None, **kw):
        super(KernelRestarterBase, self).__init__(**kw)
        self.kernel_manager = kernel_manager
        self.kernel_type = kernel_type
        self.kernel_finder = kernel_finder or KernelFinder.from_entrypoints()
        self.callbacks = dict(died=[], restarted=[], failed=[])

    def start(self):
        """Start monitoring the kernel."""
        raise NotImplementedError("Must be implemented in a subclass")

    def stop(self):
        """Stop monitoring."""
        raise NotImplementedError("Must be implemented in a subclass")

    def add_callback(self, f, event):
        """
        Register a callback to fire on a particular event.

        Possible values for event:
          'died': the monitored kernel has died

          'restarted': a restart has been attempted (this does not necessarily mean that the new kernel is usable).

          'failed': *restart_limit* attempts have failed in quick succession, and the restarter is giving up.
        """
        self.callbacks[event].append(f)

    def remove_callback(self, f, event):
        """Unregister a callback from a particular event

        Possible values for *event* are the same as in :meth:`add_callback`.
        """
        try:
            self.callbacks[event].remove(f)
        except ValueError:
            pass

    def _fire_callbacks(self, event, data):
        """fire our callbacks for a particular event"""
        for callback in self.callbacks[event]:
            try:
                callback(data)
            except Exception:
                self.log.error("KernelRestarter: %s callback %r failed",
                               event,
                               callback,
                               exc_info=True)

    async def do_restart(self, auto=False):
        """Called when the kernel has died"""
        if auto and self._restarting:
            self._restart_count += 1
        else:
            self._restart_count = 1

        if self._restart_count >= self.restart_limit:
            self.log.warning("KernelRestarter: restart failed")
            self._fire_callbacks('failed', {
                'restart_count': self._restart_count,
            })
            self._restarting = False
            self._restart_count = 0
            self.stop()
        else:
            cwd = getattr(self.kernel_manager, 'cwd', None)  # :-/
            self.log.info("KernelRestarter: starting new manager (%i/%i)",
                          self._restart_count, self.restart_limit)
            await self.kernel_manager.cleanup()
            conn_info, mgr = await self.kernel_finder.launch(
                self.kernel_type, cwd)
            self._fire_callbacks('restarted', {
                'auto': auto,
                'connection_info': conn_info,
                'manager': mgr,
            })
            self.kernel_manager = mgr
            self._restarting = True

    async def poll(self):
        if self.debug:
            self.log.debug('Polling kernel...')
        if not await self.kernel_manager.is_alive():
            self._fire_callbacks('died', {})
            await self.do_restart(auto=True)
        else:
            if self._restarting:
                self.log.debug("KernelRestarter: restart apparently succeeded")
            self._restarting = False
Example #30
class ImageDataGraph(base.DOMWidget):
    """The backend python class for the custom ImageDataGraph widget.

    This class declares and initializes all of the data that is synced
        between the front- and back-ends of the widget code.
    It also performs most of the mathematical calculations that drive this widget."""

    _view_name = Unicode("ImgDataGraphView").tag(sync=True)
    _model_name = Unicode("ImgDataGraphModel").tag(sync=True)

    _b64value = Unicode().tag(sync=True)
    _graphb64 = Unicode().tag(sync=True)
    _format = Unicode().tag(sync=True)
    _nrows = Integer().tag(sync=True)
    _ncols = Integer().tag(sync=True)
    _offsetX1 = Float().tag(sync=True)
    _offsetY1 = Float().tag(sync=True)
    _offsetX2 = Float().tag(sync=True)
    _offsetY2 = Float().tag(sync=True)
    _img_min = Float().tag(sync=True)
    _img_max = Float().tag(sync=True)
    _graph_click = Integer(0).tag(sync=True)
    _linepix_width = Float(1.0).tag(sync=True)
    _num_bins = Integer(1).tag(sync=True)

    width = Integer().tag(sync=True)
    height = Integer().tag(sync=True)

    def __init__(self, image, width, height, uformat="png"):
        """Constructor method for setting the necessary
               member variables (including synced ones).
               This function also calls the getimg_bytes() method
               to provide the image data used to create the widget.

        Parameters:

        * image: an ImageFile object (see
              https://github.com/ornlneutronimaging/iMars3D/blob/master/python/imars3d/ImageFile.py
              for more details) that stores the data
              for the image to be used in this widget.
        * width: an integer that is used to set the width of the image and UI elements.
        * height: an integer that is used to set the height of the image and UI elements.
        * uformat: a string indicating the type of image
              that the displayed image and graph will be.
              By default, this is set to "png"."""

        self.img = image
        self.img_data = image.data.copy()
        self.width = width
        self.height = height
        self._format = uformat
        self._nrows, self._ncols = self.img_data.shape
        self._img_min, self._img_max = int(np.min(self.img_data)), int(np.max(self.img_data))
        self._b64value = self.getimg_bytes()
        super(ImageDataGraph, self).__init__()
        return

    def getimg_bytes(self):
        """Encodes the image's data into Base64."""

        img = ((self.img_data-self._img_min)/(self._img_max-self._img_min)*(2**8-1)).astype("uint8")
        size = np.max(img.shape)
        view_size = np.max((self.width, self.height))
        # The down- and up-sampling branches computed the same ratio and made
        # the same call, so they are merged here. Note that scipy.misc.imresize
        # was removed in SciPy 1.3 (see _imresize_pil below for a stand-in).
        resample_ratio = view_size/size
        import scipy.misc
        img = scipy.misc.imresize(img, resample_ratio)
        if sys.version_info < (3, 0):
            from cStringIO import StringIO
            f = StringIO()
        else:
            from io import BytesIO
            f = BytesIO()
        import base64
        import PIL.Image
        PIL.Image.fromarray(img).save(f, self._format)
        imgb64v = base64.b64encode(f.getvalue())
        return imgb64v

    # This function is called when the value of _graph_click changes.
    @observe("_graph_click")
    def graph_data(self, change):
        """Determines whether the graph calculations should
               include width or not and calls the appropriate function."""

        if self._linepix_width == 1:
            self._graphb64 = self.nowidth_graph()
        else:
            self._graphb64 = self.width_graph()
        return

    def nowidth_graph(self):
        """Collects the data for a line with no width.
               Then, creates a matplotlib graph of the data,
               and encodes the graph into Base64 for the JavaScript code to display."""

        p1x_abs = self._offsetX1/self.width * self._ncols
        p1y_abs = self._offsetY1/self.height * self._nrows
        p2x_abs = self._offsetX2/self.width * self._ncols
        p2y_abs = self._offsetY2/self.height * self._nrows
        if p1x_abs > p2x_abs:
            # Ensure point 1 is the leftmost point.
            p1x_abs, p1y_abs, p2x_abs, p2y_abs = p2x_abs, p2y_abs, p1x_abs, p1y_abs
        xcoords = []
        ycoords = []
        dists = []
        vals = []
        curr_x_abs = p1x_abs
        curr_y_abs = p1y_abs
        curr_x = int(curr_x_abs)
        curr_y = int(curr_y_abs)
        xcoords.append(curr_x)
        ycoords.append(curr_y)
        vals.append(self.img_data[curr_y, curr_x])
        if p2y_abs == p1y_abs and p2x_abs != p1x_abs:
            while curr_x_abs < p2x_abs:
                curr_x_abs += 1
                curr_x = int(curr_x_abs)
                curr_y = int(curr_y_abs)
                xcoords.append(curr_x)
                ycoords.append(curr_y)
                vals.append(self.img_data[curr_y, curr_x])
        elif p2x_abs == p1x_abs and p2y_abs != p1y_abs:
            while curr_y_abs < p2y_abs:
                curr_y_abs += 1
                curr_x = int(curr_x_abs)
                curr_y = int(curr_y_abs)
                xcoords.append(curr_x)
                ycoords.append(curr_y)
                vals.append(self.img_data[curr_y, curr_x])
        else:
            # The slope is constant, so compute it once outside the loop.
            slope = (p2y_abs - p1y_abs) / (p2x_abs - p1x_abs)
            while curr_x_abs < p2x_abs:
                curr_x_abs += 1
                curr_y_abs += slope
                curr_x = int(curr_x_abs)
                curr_y = int(curr_y_abs)
                if curr_x_abs < p2x_abs:
                    xcoords.append(curr_x)
                    ycoords.append(curr_y)
                    vals.append(self.img_data[curr_y, curr_x])
        curr_x = int(p2x_abs)
        curr_y = int(p2y_abs)
        xcoords.append(curr_x)
        ycoords.append(curr_y)
        vals.append(self.img_data[curr_y, curr_x])  # row index (y) first, as in every other access
        for x, y in np.nditer([xcoords, ycoords]):
            dist = np.sqrt(((x - xcoords[0])**2 + (y - ycoords[0])**2))
            dists.append(dist)
        plt.plot(dists, vals)
        plt.xlim(np.min(dists) * 0.75, np.max(dists))
        plt.ylim(np.min(vals) * 0.75, np.max(vals) * 1.25)
        plt.xlabel("Distance from Initial Point")
        plt.ylabel("Value")
        graph = plt.gcf()
        if sys.version_info < (3, 0):
            from StringIO import StringIO
            graphdata = StringIO()
        else:
            from io import BytesIO
            graphdata = BytesIO()
        graph.savefig(graphdata, format=self._format)
        graphdata.seek(0)
        import base64
        gb64v = base64.b64encode(graphdata.read())
        plt.clf()
        return gb64v

    def width_graph(self):
        """Creates the graph for a line with width.
               First, it calculates the endpoints of the drawn line.
               Then, depending on whether the line is horizontal,
               vertical, or diagonal, it calls the corresponding function
               to get the data needed for graphing. Finally, it creates
               the matplotlib graph and encodes it into Base64."""

        p1x_abs = self._offsetX1/self.width * self._ncols
        p1y_abs = self._offsetY1/self.height * self._nrows
        p2x_abs = self._offsetX2/self.width * self._ncols
        p2y_abs = self._offsetY2/self.height * self._nrows
        dists = []
        vals = []
        if p1y_abs == p2y_abs and p1x_abs != p2x_abs:
            dists, vals, bar_width = self.get_data_horizontal(p1x_abs, p1y_abs, p2x_abs)
        elif p1y_abs != p2y_abs and p1x_abs == p2x_abs:
            dists, vals, bar_width = self.get_data_vertical(p1x_abs, p1y_abs, p2y_abs)
        else:
            dists, vals, bar_width = self.get_data_diagonal(p1x_abs, p1y_abs, p2x_abs, p2y_abs)
        plt.bar(dists, vals, width=bar_width)
        plt.xlabel("Distance from Initial Point")
        plt.ylabel("Value")
        graph = plt.gcf()
        if sys.version_info < (3, 0):
            from StringIO import StringIO
            graphdata = StringIO()
        else:
            from io import BytesIO
            graphdata = BytesIO()
        graph.savefig(graphdata, format=self._format)
        graphdata.seek(0)
        import base64
        gb64v = base64.b64encode(graphdata.read())
        plt.clf()
        return gb64v

    def get_data_horizontal(self, x_init, y_init, x_fin):
        """Calculates the graphing data for a horizontal line with width.

        Parameters:

        * x_init: a float containing the exact mathematical
              x coordinate of the first point of the line.
        * y_init: a float containing the exact mathematical
              y coordinate of the first point of the line.
        * x_fin: a float containing the exact mathematical
              x coordinate of the last point of the line."""

        vals = []
        x0 = x_init
        x1 = x_fin
        if x0 > x1:
            x0, x1 = x1, x0
        # True division here: the original floor division made any line width
        # smaller than the displayed height collapse to zero rows.
        wid = self._linepix_width/self.height * self._nrows
        top = y_init - wid//2
        if int(top) < 0:
            top = 0
        bottom = y_init + wid//2 + 1
        if int(bottom) > self._nrows - 1:
            bottom = self._nrows - 1
        max_dist = abs(x1 - x0)
        bin_step = max_dist / self._num_bins
        bins = [0]
        curr_bin_max = 0
        for i in range(self._num_bins):
            curr_bin_max += bin_step
            bins.append(curr_bin_max)
        intensities = np.zeros(len(bins))
        num_binvals = np.zeros(len(bins))
        Y, X = np.mgrid[top:bottom, x0:(x1+1)]
        for x, y in np.nditer([X, Y]):
            # enumerate avoids the O(n) bins.index() lookup on every pixel
            for ind, b in enumerate(bins[:-1]):
                if b + x0 <= x < bins[ind + 1] + x0:
                    intensities[ind] += self.img_data[int(y), int(x)]
                    num_binvals[ind] += 1
                    break
        for val, num in np.nditer([intensities, num_binvals]):
            if num == 0:
                vals.append(0)
            else:
                vals.append(val/num)
        return bins, vals, bin_step

    def get_data_vertical(self, x_init, y_init, y_fin):
        """Calculates the graphing data for a vertical line with width.

        Parameters:

        * x_init: a float containing the exact mathematical
              x coordinate of the first point of the line.
        * y_init: a float containing the exact mathematical
              y coordinate of the first point of the line.
        * y_fin: a float containing the exact mathematical
              y coordinate of the last point of the line."""

        vals = []
        y0 = y_init
        y1 = y_fin
        if y0 > y1:
            y0, y1 = y1, y0
        # True division, for the same reason as in get_data_horizontal.
        wid = self._linepix_width/self.width * self._ncols
        left = x_init - wid//2
        if int(left) < 0:
            left = 0
        right = x_init + wid//2 + 1
        if int(right) > self._ncols - 1:
            right = self._ncols - 1
        max_dist = abs(y1 - y0)
        bin_step = max_dist / self._num_bins
        bins = [0]
        curr_bin_max = 0
        for i in range(self._num_bins):
            curr_bin_max += bin_step
            bins.append(curr_bin_max)
        intensities = np.zeros(len(bins))
        num_binvals = np.zeros(len(bins))
        Y, X = np.mgrid[y0:(y1+1), left:right]
        for x, y in np.nditer([X, Y]):
            for ind, b in enumerate(bins[:-1]):
                if b + y0 <= y < bins[ind + 1] + y0:
                    intensities[ind] += self.img_data[int(y), int(x)]
                    num_binvals[ind] += 1
                    break
        for val, num in np.nditer([intensities, num_binvals]):
            if num == 0:
                vals.append(0)
            else:
                vals.append(val/num)
        return bins, vals, bin_step

    def get_data_diagonal(self, x_init, y_init, x_fin, y_fin):
        """Calculates the graphing data for a vertical line with width.

        Parameters:

        * x_init: a float containing the exact mathematical
              x coordinate of the first point of the line.
        * y_init: a float containing the exact mathematical
              y coordinate of the first point of the line.
        * x_fin: a float containing the exact mathematical
              x coordinate of the last point of the line.
        * y_fin: a float containing the exact mathematical
              y coordinate of the last point of the line."""

        bins = []
        vals = []
        x0 = x_init
        x1 = x_fin
        y0 = y_init
        y1 = y_fin
        if x0 > x1:
            # Ensure (x0, y0) is the leftmost endpoint.
            x0, y0, x1, y1 = x1, y1, x0, y0
        slope = (y1 - y0) / (x1 - x0)
        angle = np.arctan(slope)
        wid_x = abs((self._linepix_width * np.cos(angle))/self.width * self._ncols)
        wid_y = abs((self._linepix_width * np.sin(angle))/self.height * self._nrows)
        wid = np.sqrt((wid_x)**2 + (wid_y)**2)
        left = x0 - (wid_x // 2)
        right = x1 + (wid_x // 2) + 1
        if slope > 0:
            bottom = y0 - (wid_y // 2)
            top = y1 + (wid_y // 2) + 1
        else:
            bottom = y1 - (wid_y // 2)
            top = y0 + (wid_y // 2)
        if int(bottom) < 0:
            bottom = 0
        if int(top) > self._nrows - 1:
            top = self._nrows - 1
        if int(left) < 0:
            left = 0
        if int(right) > self._ncols - 1:
            right = self._ncols - 1
        Y, X = np.mgrid[bottom:top, left:right]
        h_x = X - x0
        h_y = Y - y0
        norm_x = (y0 - y1) / np.sqrt((y0 - y1)**2 + (x1 - x0)**2)
        norm_y = (x1 - x0) / np.sqrt((y0 - y1)**2 + (x1 - x0)**2)
        e_x = (x1 - x0) / np.sqrt((x1 - x0)**2 + (y1 - y0)**2)
        e_y = (y1 - y0) / np.sqrt((x1 - x0)**2 + (y1 - y0)**2)
        dist = h_x*norm_x + h_y*norm_y
        pos = h_x*e_x + h_y*e_y
        max_dist = np.sqrt((x1 - x0)**2 + (y1 - y0)**2)
        bin_step = max_dist / self._num_bins
        curr_bin_max = 0
        bin_borders = [0]
        for i in range(self._num_bins):
            curr_bin_max += bin_step
            bin_borders.append(curr_bin_max)
        intensities = np.zeros(len(bin_borders))
        num_binvals = np.zeros(len(bin_borders))
        for x, y, d, p in np.nditer([X, Y, dist, pos]):
            # d is a *signed* perpendicular distance; without abs() every
            # pixel on the negative side of the line was accepted.
            if abs(d) <= wid / 2 and 0 <= p <= max_dist:
                for ind, b in enumerate(bin_borders[:-1]):
                    if b <= p < bin_borders[ind + 1]:
                        intensities[ind] += self.img_data[int(y), int(x)]
                        num_binvals[ind] += 1
                        break
        for i, n in np.nditer([intensities, num_binvals]):
            if n == 0:
                vals.append(0)
            else:
                vals.append(i/n)
        bins = bin_borders
        return bins, vals, bin_step