Example #1
class Graph(widgets.DOMWidget):
    """Plot and edit Planar Straight Line Graphs"""
    _view_name = Unicode('PSLGEditorView').tag(sync=True)
    _model_name = Unicode('PSLGEditorModel').tag(sync=True)
    _model_module = Unicode('ipymesh-widgets').tag(sync=True)
    _view_module = Unicode('ipymesh-widgets').tag(sync=True)
    _view_module_version = Unicode('^0.1.7').tag(sync=True)
    _model_module_version = Unicode('^0.1.7').tag(sync=True)
    _sync_toggle = Bool().tag(sync=True)
    width = Int(600).tag(sync=True)
    height = Int(600).tag(sync=True)
    Lx = Float(1.0).tag(sync=True)
    Ly = Float(1.0).tag(sync=True)
    image = Bytes().tag(sync=True)
    image_filename = Unicode('').tag(sync=True)
    x0 = Float(0.0).tag(sync=True)
    y0 = Float(0.0).tag(sync=True)
    vertices = List([[300, 300]]).tag(sync=True)
    vertexFlags = List([1]).tag(sync=True)
    segments = List([]).tag(sync=True)
    segmentFlags = List([]).tag(sync=True)
    regions = List([]).tag(sync=True)
    regionFlags = List([]).tag(sync=True)
    holes = List([]).tag(sync=True)
    boundary_type = Int(0).tag(sync=True)
    region_type = Int(0).tag(sync=True)
    add_new = Unicode('Vertex').tag(sync=True)
    xy = List().tag(sync=True)

    def __init__(self, *args, **kwargs):
        super(Graph, self).__init__(*args, **kwargs)

        if self.image_filename:
            with open(self.image_filename, 'rb') as f:
                self.image = f.read()
        self._sync_toggle = True
        # 'parent' is required and must expose enter_x/enter_y widgets
        # mirroring the coordinates picked in the editor (see update_xy).
        self.parent = kwargs['parent']

    def sync(self):
        # Flipping the synced flag forces a state round-trip with the front-end.
        self._sync_toggle = not self._sync_toggle

    @observe('xy')
    def update_xy(self, change):
        self.parent.enter_x.value = self.xy[0]
        self.parent.enter_y.value = self.xy[1]
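A hypothetical usage sketch for the editor above. The parent object with
enter_x/enter_y widgets is an assumption implied by update_xy, not part of
the shown API:

import ipywidgets as widgets

class Parent:
    # Hypothetical container mirroring the coordinates picked in the editor.
    def __init__(self):
        self.enter_x = widgets.FloatText(description='x')
        self.enter_y = widgets.FloatText(description='y')

graph = Graph(vertices=[[300, 300]], vertexFlags=[1], parent=Parent())
# Clicking in the editor updates `xy`, which mirrors into enter_x/enter_y.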
Example #2
class Texture(DrawableWithCallback):
    """
    A 2D image displayed as a texture.

    By default, the texture image is mapped into the square: -0.5 < x, y < 0.5, z = 1.
    If a different size (scale, aspect ratio) or position is needed, transform the texture
    using the model_matrix.

    Attributes:
        binary: `bytes`.
            Image data in a specific format.
        file_format: `str`.
            Format of the data; it should be the second part of a MIME type of the form 'image/*',
            for example 'jpeg', 'png', 'gif', 'tiff'.
        attribute: `array_like`.
            Array of float attributes for the color mapping, one value per pixel.
        color_map: `list`.
            A list of float quadruplets (attribute value, R, G, B), sorted by attribute value. The first
            quadruplet should have value 0.0, the last 1.0; R, G, B are RGB color components in the range 0.0 to 1.0.
        color_range: `list`.
            A pair [min_value, max_value], which determines the levels of color attribute mapped
            to 0 and 1 in the color map respectively.
        puv: `list`.
            A list of float triplets (x, y, z). The first triplet is the position of the bottom-left
            corner of the texture. The second and third triplets are the basis vectors of the
            texture's coordinate system.
        model_matrix: `array_like`.
            4x4 model transform matrix.
    """

    type = Unicode(read_only=True).tag(sync=True)
    binary = Bytes(allow_none=True).tag(sync=True)
    file_format = Unicode(allow_none=True).tag(sync=True)
    attribute = Array().tag(sync=True, **array_serialization_wrap('attribute'))
    puv = Array(dtype=np.float32).tag(sync=True,
                                      **array_serialization_wrap('puv'))
    color_map = Array(dtype=np.float32).tag(
        sync=True, **array_serialization_wrap('color_map'))
    color_range = ListOrArray(minlen=2, maxlen=2, empty_ok=True).tag(sync=True)
    model_matrix = TimeSeries(Array(dtype=np.float32)).tag(
        sync=True, **array_serialization_wrap('model_matrix'))

    def __init__(self, **kwargs):
        super(Texture, self).__init__(**kwargs)

        self.set_trait('type', 'Texture')
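A minimal usage sketch, assuming the k3d-style factory API (k3d.plot,
k3d.texture) and a placeholder PNG file:

import k3d

with open('texture.png', 'rb') as f:  # placeholder path
    data = f.read()

plot = k3d.plot()
plot += k3d.texture(binary=data, file_format='png')
plot.display()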
Example #3
class FileUpload(DescriptionWidget, ValueWidget, CoreWidget):
    """
    Upload file(s) from browser to Python kernel as bytes
    """
    _model_name = Unicode('FileUploadModel').tag(sync=True)
    _view_name = Unicode('FileUploadView').tag(sync=True)
    _counter = Int().tag(sync=True)

    accept = Unicode(help='File types to accept, empty string for all').tag(
        sync=True)
    multiple = Bool(help='If True, allow for multiple files upload').tag(
        sync=True)
    disabled = Bool(help='Enable or disable button').tag(sync=True)
    icon = Unicode(
        'upload',
        help="Font-awesome icon name, without the 'fa-' prefix.").tag(
            sync=True)
    button_style = CaselessStrEnum(
        values=['primary', 'success', 'info', 'warning', 'danger', ''],
        default_value='',
        help="""Use a predefined styling for the button.""").tag(sync=True)
    style = InstanceDict(ButtonStyle).tag(sync=True, **widget_serialization)
    metadata = List(Dict(), help='List of file metadata').tag(sync=True)
    data = List(Bytes(), help='List of file content (bytes)').tag(
        sync=True, from_json=content_from_json)
    error = Unicode(help='Error message').tag(sync=True)
    value = Dict(read_only=True)

    @observe('_counter')
    def on_incr_counter(self, change):
        """
        Counter increment triggers an update of the `value` trait.
        """
        res = {}
        msg = 'Error: length of metadata and data must be equal'
        assert len(self.metadata) == len(self.data), msg
        for metadata, content in zip(self.metadata, self.data):
            name = metadata['name']
            res[name] = {'metadata': metadata, 'content': content}
        self.set_trait('value', res)

    @default('description')
    def _default_description(self):
        return 'Upload'
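A short sketch of reading the uploaded bytes back on the kernel side, using
the widget above in a notebook:

uploader = FileUpload(accept='.csv', multiple=False)
# display(uploader), then pick a file in the browser

# `value` maps each file name to its metadata and raw content:
for name, item in uploader.value.items():
    print(name, item['metadata']['size'], len(item['content']))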
Example #4
class Image(DOMWidget):
    """Displays an image as a widget.

    The `value` of this widget accepts a byte string.  The byte string is the raw
    image data that you want the browser to display.  You can explicitly define
    the format of the byte string using the `format` trait (which defaults to
    "png")."""
    _view_name = Unicode('ImageView', sync=True)

    # Define the custom state properties to sync with the front-end
    format = Unicode('png', sync=True)
    width = CUnicode(sync=True)
    height = CUnicode(sync=True)
    _b64value = Unicode(sync=True)

    value = Bytes()

    def _value_changed(self, name, old, new):
        self._b64value = base64.b64encode(new)
Example #5
class Texture(Drawable):
    """
    A 2D image displayed as a texture.

    By default, the texture image is mapped into the square: -0.5 < x, y < 0.5, z = 1.
    If a different size (scale, aspect ratio) or position is needed, transform the texture
    using the model_matrix.

    Attributes:
        binary: `bytes`. Image data in a specific format.
        file_format: `str`. Format of the data; it should be the second part of a MIME type of the
            form 'image/*', for example 'jpeg', 'png', 'gif', 'tiff'.
        model_matrix: `array_like`. 4x4 model transform matrix.
    """

    type = Unicode(default_value='Texture', read_only=True).tag(sync=True)
    binary = Bytes().tag(sync=True)
    file_format = Unicode().tag(sync=True)
    model_matrix = Array().tag(sync=True, **array_serialization)
Example #6
class MediaRecorder(DOMWidget):
    """Creates a recorder which allows to record a MediaStream widget, play the
    record in the Notebook, and download it.
    """
    _model_module = Unicode('jupyter-webrtc').tag(sync=True)
    _view_module = Unicode('jupyter-webrtc').tag(sync=True)
    _model_name = Unicode('MediaRecorderModel').tag(sync=True)
    _view_name = Unicode('MediaRecorderView').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)

    stream = Instance(MediaStream, allow_none=True).tag(sync=True,
                                                        **widget_serialization)
    data = Bytes(help="The video data as a byte string.").tag(
        sync=True, from_json=_memoryview_to_bytes)
    filename = Unicode('recording').tag(sync=True)
    format = Unicode('webm').tag(sync=True)
    record = Bool(False).tag(sync=True)
    autosave = Bool(False).tag(sync=True)

    @observe('data')
    def _check_autosave(self, change):
        if len(self.data) and self.autosave:
            self.save()

    def play(self):
        self.send({'msg': 'play'})

    def download(self):
        self.send({'msg': 'download'})

    def save(self, filename=None):
        filename = filename or self.filename
        if '.' not in filename:
            filename += '.' + self.format
        if len(self.data) == 0:
            raise ValueError('No data, did you record anything?')
        with open(filename, 'wb') as f:
            f.write(self.data)

    _video_src = Unicode('').tag(sync=True)
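A hedged usage sketch, assuming the ipywebrtc package (frontend module
'jupyter-webrtc') provides a CameraStream widget to record from:

from ipywebrtc import CameraStream  # assumption: the stream source widget

camera = CameraStream()
recorder = MediaRecorder(stream=camera, filename='demo', autosave=True)
recorder.record = True   # start recording in the browser
# ... later ...
recorder.record = False  # stop; autosave then writes 'demo.webm' via save()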
Example #7
class Image(DOMWidget, ValueWidget, CoreWidget):
    """Displays an image as a widget.

    The `value` of this widget accepts a byte string.  The byte string is the
    raw image data that you want the browser to display.  You can explicitly
    define the format of the byte string using the `format` trait (which
    defaults to "png").
    """
    _view_name = Unicode('ImageView').tag(sync=True)
    _model_name = Unicode('ImageModel').tag(sync=True)

    # Define the custom state properties to sync with the front-end
    format = Unicode('png', help="The format of the image.").tag(sync=True)
    width = CUnicode(help="Width of the image in pixels.").tag(sync=True)
    height = CUnicode(help="Height of the image in pixels.").tag(sync=True)
    _b64value = Unicode(help="The base64 encoded image data.").tag(sync=True)

    value = Bytes(help="The image data as a byte string.")

    @observe('value')
    def _value_changed(self, change):
        self._b64value = base64.b64encode(change['new'])
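For example, feeding raw PNG bytes into `value` (the path is a placeholder):

with open('logo.png', 'rb') as f:
    img = Image(value=f.read(), format='png', width='200')
# display(img); the observer keeps _b64value in sync for the front-end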
Example #8
class ImageButton(Button):
    """An example widget."""
    _view_name = Unicode('ImageButtonView').tag(sync=True)
    _model_name = Unicode('ImageButtonModel').tag(sync=True)
    _view_module = Unicode('jupyter_cadquery').tag(sync=True)
    _model_module = Unicode('jupyter_cadquery').tag(sync=True)
    _view_module_version = Unicode('^%s' % __npm_version__).tag(sync=True)
    _model_module_version = Unicode('^%s' % __npm_version__).tag(sync=True)
    image_path = Unicode("")
    value = Bytes().tag(sync=True, **bytes_serialization)
    width = CUnicode("36", help="Width of the image in pixels.").tag(sync=True)
    height = CUnicode("28", help="Height of the image in pixels.").tag(sync=True)
    type = Unicode("").tag(sync=True)

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.value = self._load_image(self.image_path)

    def _load_image(self, image_path):
        if image_path == "":
            return b""
        # read the image file as raw bytes, closing the handle promptly
        with open(image_path, 'rb') as f:
            return f.read()
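A minimal instantiation sketch; the icon path and handler are placeholders,
and on_click is inherited from the Button base class:

button = ImageButton(image_path='icons/run.png', type='run')

def handler(widget):
    print('clicked:', widget.type)

button.on_click(handler)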
Example #9
class Image(DOMWidget, ValueWidget, CoreWidget):
    """Displays an image as a widget.

    The `value` of this widget accepts a byte string.  The byte string is the
    raw image data that you want the browser to display.  You can explicitly
    define the format of the byte string using the `format` trait (which
    defaults to "png").

    If you pass `"url"` to the `"format"` trait, `value` will be interpreted
    as a URL, stored as UTF-8 encoded bytes.
    """
    _view_name = Unicode('ImageView').tag(sync=True)
    _model_name = Unicode('ImageModel').tag(sync=True)

    # Define the custom state properties to sync with the front-end
    format = Unicode('png', help="The format of the image.").tag(sync=True)
    width = CUnicode(help="Width of the image in pixels.").tag(sync=True)
    height = CUnicode(help="Height of the image in pixels.").tag(sync=True)
    value = Bytes(help="The image data as a byte string.").tag(sync=True)

    @classmethod
    def from_file(cls, filename, **kwargs):
        """
        Create an :class:`Image` from a local file.

        Parameters
        ----------
        filename: str
            The location of a file to read into value from disk.

        **kwargs:
            The keyword arguments for `Image`

        Returns an `Image` with the value set from the filename.
        """
        value = cls._load_file_value(filename)

        if 'format' not in kwargs:
            img_format = cls._guess_format(filename)
            if img_format is not None:
                kwargs['format'] = img_format

        return cls(value=value, **kwargs)

    @classmethod
    def from_url(cls, url, **kwargs):
        """
        Create an :class:`Image` from a URL.

        :code:`Image.from_url(url)` is equivalent to:

        .. code-block:: python

            img = Image(value=url, format='url')

        But both unicode and bytes arguments are allowed for ``url``.

        Parameters
        ----------
        url: [str, bytes]
            The location of a URL to load.
        """
        if isinstance(url, _text_type):
            # If unicode (str in Python 3), it needs to be encoded to bytes
            url = url.encode('utf-8')

        return cls(value=url, format='url', **kwargs)

    def set_value_from_file(self, filename):
        """
        Convenience method for reading a file into `value`.

        Parameters
        ----------
        filename: str
            The location of a file to read into value from disk.
        """
        value = self._load_file_value(filename)

        self.value = value

    @classmethod
    def _load_file_value(cls, filename):
        if getattr(filename, 'read', None) is not None:
            return filename.read()
        else:
            with open(filename, 'rb') as f:
                return f.read()

    @classmethod
    def _guess_format(cls, filename):
        # file objects may have a .name attribute
        name = getattr(filename, 'name', None)
        name = name or filename

        try:
            mtype, _ = mimetypes.guess_type(name)
            if not mtype.startswith('image/'):
                return None

            return mtype[len('image/'):]
        except Exception:
            return None

    def __repr__(self):
        # Truncate the value in the repr, since it will
        # typically be very, very large.
        class_name = self.__class__.__name__

        # Return value first like a ValueWidget
        signature = []
        sig_value = repr(self.value)
        prefix, rest = sig_value.split("'", 1)
        content = rest[:-1]
        if len(content) > 100:
            sig_value = "{}'{}...'".format(prefix, content[0:100])
        signature.append('%s=%s' % ('value', sig_value))

        for key in super(Image, self)._repr_keys():
            if key == 'value':
                continue
            value = str(getattr(self, key))
            signature.append('%s=%r' % (key, value))
        signature = ', '.join(signature)
        return '%s(%s)' % (class_name, signature)
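The classmethods above can be exercised like this (paths and URLs are
placeholders):

img1 = Image.from_file('photo.jpg')  # format guessed as 'jpeg'
img2 = Image.from_url('https://example.com/logo.png')  # value holds URL bytes
img1.set_value_from_file('other.jpg')  # replace the image data in place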
Example #10
class JupyterHub(Application):
    """An Application for starting a Multi-User Jupyter Notebook server."""
    name = 'jupyterhub'
    version = jupyterhub.__version__
    
    description = """Start a multi-user Jupyter Notebook server
    
    Spawns a configurable-http-proxy and multi-user Hub,
    which authenticates users and spawns single-user Notebook servers
    on behalf of users.
    """
    
    examples = """
    
    generate default config file:
    
        jupyterhub --generate-config -f /etc/jupyterhub/jupyterhub.py
    
    spawn the server on 10.0.1.2:443 with https:
    
        jupyterhub --ip 10.0.1.2 --port 443 --ssl-key my_ssl.key --ssl-cert my_ssl.cert
    """
    
    aliases = Dict(aliases)
    flags = Dict(flags)
    
    subcommands = {
        'token': (NewToken, "Generate an API token for a user")
    }
    
    classes = List([
        Spawner,
        LocalProcessSpawner,
        Authenticator,
        PAMAuthenticator,
    ])
    
    config_file = Unicode('jupyterhub_config.py', config=True,
        help="The config file to load",
    )
    generate_config = Bool(False, config=True,
        help="Generate default config file",
    )
    answer_yes = Bool(False, config=True,
        help="Answer yes to any questions (e.g. confirm overwrite)"
    )
    pid_file = Unicode('', config=True,
        help="""File to write PID
        Useful for daemonizing jupyterhub.
        """
    )
    cookie_max_age_days = Float(14, config=True,
        help="""Number of days for a login cookie to be valid.
        Default is two weeks.
        """
    )
    last_activity_interval = Integer(300, config=True,
        help="Interval (in seconds) at which to update last-activity timestamps."
    )
    proxy_check_interval = Integer(30, config=True,
        help="Interval (in seconds) at which to check if the proxy is running."
    )
    
    data_files_path = Unicode(DATA_FILES_PATH, config=True,
        help="The location of jupyterhub data files (e.g. /usr/local/share/jupyter/hub)"
    )

    template_paths = List(
        config=True,
        help="Paths to search for jinja templates.",
    )

    def _template_paths_default(self):
        return [os.path.join(self.data_files_path, 'templates')]

    ssl_key = Unicode('', config=True,
        help="""Path to SSL key file for the public facing interface of the proxy
        
        Use with ssl_cert
        """
    )
    ssl_cert = Unicode('', config=True,
        help="""Path to SSL certificate file for the public facing interface of the proxy
        
        Use with ssl_key
        """
    )
    ip = Unicode('', config=True,
        help="The public facing ip of the proxy"
    )
    port = Integer(8000, config=True,
        help="The public facing port of the proxy"
    )
    base_url = URLPrefix('/', config=True,
        help="The base URL of the entire application"
    )
    
    jinja_environment_options = Dict(config=True,
        help="Supply extra arguments that will be passed to Jinja environment."
    )
    
    proxy_cmd = Command('configurable-http-proxy', config=True,
        help="""The command to start the http proxy.
        
        Only override if configurable-http-proxy is not on your PATH
        """
    )
    debug_proxy = Bool(False, config=True, help="show debug output in configurable-http-proxy")
    proxy_auth_token = Unicode(config=True,
        help="""The Proxy Auth token.

        Loaded from the CONFIGPROXY_AUTH_TOKEN env variable by default.
        """
    )
    def _proxy_auth_token_default(self):
        token = os.environ.get('CONFIGPROXY_AUTH_TOKEN', None)
        if not token:
            self.log.warn('\n'.join([
                "",
                "Generating CONFIGPROXY_AUTH_TOKEN. Restarting the Hub will require restarting the proxy.",
                "Set CONFIGPROXY_AUTH_TOKEN env or JupyterHub.proxy_auth_token config to avoid this message.",
                "",
            ]))
            token = orm.new_token()
        return token
    
    proxy_api_ip = Unicode('localhost', config=True,
        help="The ip for the proxy API handlers"
    )
    proxy_api_port = Integer(config=True,
        help="The port for the proxy API handlers"
    )
    def _proxy_api_port_default(self):
        return self.port + 1
    
    hub_port = Integer(8081, config=True,
        help="The port for this process"
    )
    hub_ip = Unicode('localhost', config=True,
        help="The ip for this process"
    )
    
    hub_prefix = URLPrefix('/hub/', config=True,
        help="The prefix for the hub server. Must not be '/'"
    )
    def _hub_prefix_default(self):
        return url_path_join(self.base_url, '/hub/')
    
    def _hub_prefix_changed(self, name, old, new):
        if new == '/':
            raise TraitError("'/' is not a valid hub prefix")
        if not new.startswith(self.base_url):
            self.hub_prefix = url_path_join(self.base_url, new)
    
    cookie_secret = Bytes(config=True, env='JPY_COOKIE_SECRET',
        help="""The cookie secret to use to encrypt cookies.

        Loaded from the JPY_COOKIE_SECRET env variable by default.
        """
    )
    
    cookie_secret_file = Unicode('jupyterhub_cookie_secret', config=True,
        help="""File in which to store the cookie secret."""
    )
    
    authenticator_class = Type(PAMAuthenticator, Authenticator,
        config=True,
        help="""Class for authenticating users.
        
        This should be a class with the following form:
        
        - constructor takes one kwarg: `config`, the IPython config object.
        
        - has an `authenticate` method that:
          - is a tornado.gen.coroutine
          - returns the username on success, None on failure
          - takes two arguments: (handler, data),
            where `handler` is the calling web.RequestHandler,
            and `data` is the POST form data from the login page.
        """
    )
    
    authenticator = Instance(Authenticator)
    def _authenticator_default(self):
        return self.authenticator_class(parent=self, db=self.db)

    # class for spawning single-user servers
    spawner_class = Type(LocalProcessSpawner, Spawner,
        config=True,
        help="""The class to use for spawning single-user servers.
        
        Should be a subclass of Spawner.
        """
    )
    
    db_url = Unicode('sqlite:///jupyterhub.sqlite', config=True,
        help="url for the database. e.g. `sqlite:///jupyterhub.sqlite`"
    )
    def _db_url_changed(self, name, old, new):
        if '://' not in new:
            # assume sqlite, if given as a plain filename
            self.db_url = 'sqlite:///%s' % new

    db_kwargs = Dict(config=True,
        help="""Include any kwargs to pass to the database connection.
        See sqlalchemy.create_engine for details.
        """
    )

    reset_db = Bool(False, config=True,
        help="Purge and reset the database."
    )
    debug_db = Bool(False, config=True,
        help="log all database transactions. This has A LOT of output"
    )
    session_factory = Any()
    
    admin_access = Bool(False, config=True,
        help="""Grant admin users permission to access single-user servers.
        
        Users should be properly informed if this is enabled.
        """
    )
    admin_users = Set(config=True,
        help="""DEPRECATED, use Authenticator.admin_users instead."""
    )
    
    tornado_settings = Dict(config=True)
    
    cleanup_servers = Bool(True, config=True,
        help="""Whether to shutdown single-user servers when the Hub shuts down.
        
        Disable if you want to be able to teardown the Hub while leaving the single-user servers running.
        
        If both this and cleanup_proxy are False, sending SIGINT to the Hub will
        only shutdown the Hub, leaving everything else running.
        
        The Hub should be able to resume from database state.
        """
    )

    cleanup_proxy = Bool(True, config=True,
        help="""Whether to shutdown the proxy when the Hub shuts down.
        
        Disable if you want to be able to teardown the Hub while leaving the proxy running.
        
        Only valid if the proxy was started by the Hub process.
        
        If both this and cleanup_servers are False, sending SIGINT to the Hub will
        only shutdown the Hub, leaving everything else running.
        
        The Hub should be able to resume from database state.
        """
    )
    
    handlers = List()
    
    _log_formatter_cls = CoroutineLogFormatter
    http_server = None
    proxy_process = None
    io_loop = None
    
    def _log_level_default(self):
        return logging.INFO
    
    def _log_datefmt_default(self):
        """Exclude date from default date format"""
        return "%Y-%m-%d %H:%M:%S"

    def _log_format_default(self):
        """override default log format to include time"""
        return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"

    extra_log_file = Unicode(
        "",
        config=True,
        help="Set a logging.FileHandler on this file."
    )
    extra_log_handlers = List(
        Instance(logging.Handler),
        config=True,
        help="Extra log handlers to set on JupyterHub logger",
    )

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger that
        # self.log is a child of. The logging module dispatches log messages to a logger
        # and all of its ancestors until propagate is set to False.
        self.log.propagate = False

        if self.extra_log_file:
            self.extra_log_handlers.append(
                logging.FileHandler(self.extra_log_file)
            )

        _formatter = self._log_formatter_cls(
            fmt=self.log_format,
            datefmt=self.log_datefmt,
        )
        for handler in self.extra_log_handlers:
            if handler.formatter is None:
                handler.setFormatter(_formatter)
            self.log.addHandler(handler)

        # hook up tornado 3's loggers to our app handlers
        for log in (app_log, access_log, gen_log):
            # ensure all log statements identify the application they come from
            log.name = self.log.name
        logger = logging.getLogger('tornado')
        logger.propagate = True
        logger.parent = self.log
        logger.setLevel(self.log.level)

    def init_ports(self):
        if self.hub_port == self.port:
            raise TraitError("The hub and proxy cannot both listen on port %i" % self.port)
        if self.hub_port == self.proxy_api_port:
            raise TraitError("The hub and proxy API cannot both listen on port %i" % self.hub_port)
        if self.proxy_api_port == self.port:
            raise TraitError("The proxy's public and API ports cannot both be %i" % self.port)
    
    @staticmethod
    def add_url_prefix(prefix, handlers):
        """add a url prefix to handlers"""
        for i, tup in enumerate(handlers):
            lis = list(tup)
            lis[0] = url_path_join(prefix, tup[0])
            handlers[i] = tuple(lis)
        return handlers
    
    def init_handlers(self):
        h = []
        h.extend(handlers.default_handlers)
        h.extend(apihandlers.default_handlers)
        # load handlers from the authenticator
        h.extend(self.authenticator.get_handlers(self))

        self.handlers = self.add_url_prefix(self.hub_prefix, h)

        # some extra handlers, outside hub_prefix
        self.handlers.extend([
            (r"%s" % self.hub_prefix.rstrip('/'), web.RedirectHandler,
                {
                    "url": self.hub_prefix,
                    "permanent": False,
                }
            ),
            (r"(?!%s).*" % self.hub_prefix, handlers.PrefixRedirectHandler),
            (r'(.*)', handlers.Template404),
        ])
    
    def _check_db_path(self, path):
        """More informative log messages for failed filesystem access"""
        path = os.path.abspath(path)
        parent, fname = os.path.split(path)
        user = getuser()
        if not os.path.isdir(parent):
            self.log.error("Directory %s does not exist", parent)
        if os.path.exists(parent) and not os.access(parent, os.W_OK):
            self.log.error("%s cannot create files in %s", user, parent)
        if os.path.exists(path) and not os.access(path, os.W_OK):
            self.log.error("%s cannot edit %s", user, path)
    
    def init_secrets(self):
        trait_name = 'cookie_secret'
        trait = self.traits()[trait_name]
        env_name = trait.get_metadata('env')
        secret_file = os.path.abspath(
            os.path.expanduser(self.cookie_secret_file)
        )
        secret = self.cookie_secret
        secret_from = 'config'
        # load priority: 1. config, 2. env, 3. file
        if not secret and os.environ.get(env_name):
            secret_from = 'env'
            self.log.info("Loading %s from env[%s]", trait_name, env_name)
            secret = binascii.a2b_hex(os.environ[env_name])
        if not secret and os.path.exists(secret_file):
            secret_from = 'file'
            perm = os.stat(secret_file).st_mode
            if perm & 0o077:
                self.log.error("Bad permissions on %s", secret_file)
            else:
                self.log.info("Loading %s from %s", trait_name, secret_file)
                with open(secret_file) as f:
                    b64_secret = f.read()
                try:
                    secret = binascii.a2b_base64(b64_secret)
                except Exception as e:
                    self.log.error("%s does not contain b64 key: %s", secret_file, e)
        if not secret:
            secret_from = 'new'
            self.log.debug("Generating new %s", trait_name)
            secret = os.urandom(SECRET_BYTES)
        
        if secret_file and secret_from == 'new':
            # if we generated a new secret, store it in the secret_file
            self.log.info("Writing %s to %s", trait_name, secret_file)
            b64_secret = binascii.b2a_base64(secret).decode('ascii')
            with open(secret_file, 'w') as f:
                f.write(b64_secret)
            try:
                os.chmod(secret_file, 0o600)
            except OSError:
                self.log.warn("Failed to set permissions on %s", secret_file)
        # store the loaded trait value
        self.cookie_secret = secret
    
    # thread-local storage of db objects
    _local = Instance(threading.local, ())
    @property
    def db(self):
        if not hasattr(self._local, 'db'):
            self._local.db = scoped_session(self.session_factory)()
        return self._local.db
    
    @property
    def hub(self):
        if not getattr(self._local, 'hub', None):
            q = self.db.query(orm.Hub)
            assert q.count() <= 1
            self._local.hub = q.first()
        return self._local.hub
    
    @hub.setter
    def hub(self, hub):
        self._local.hub = hub
    
    @property
    def proxy(self):
        if not getattr(self._local, 'proxy', None):
            q = self.db.query(orm.Proxy)
            assert q.count() <= 1
            p = self._local.proxy = q.first()
            if p:
                p.auth_token = self.proxy_auth_token
        return self._local.proxy
    
    @proxy.setter
    def proxy(self, proxy):
        self._local.proxy = proxy
    
    def init_db(self):
        """Create the database connection"""
        self.log.debug("Connecting to db: %s", self.db_url)
        try:
            self.session_factory = orm.new_session_factory(
                self.db_url,
                reset=self.reset_db,
                echo=self.debug_db,
                **self.db_kwargs
            )
            # trigger constructing thread local db property
            _ = self.db
        except OperationalError as e:
            self.log.error("Failed to connect to db: %s", self.db_url)
            self.log.debug("Database error was:", exc_info=True)
            if self.db_url.startswith('sqlite:///'):
                self._check_db_path(self.db_url.split(':///', 1)[1])
            self.exit(1)
    
    def init_hub(self):
        """Load the Hub config into the database"""
        self.hub = self.db.query(orm.Hub).first()
        if self.hub is None:
            self.hub = orm.Hub(
                server=orm.Server(
                    ip=self.hub_ip,
                    port=self.hub_port,
                    base_url=self.hub_prefix,
                    cookie_name='jupyter-hub-token',
                )
            )
            self.db.add(self.hub)
        else:
            server = self.hub.server
            server.ip = self.hub_ip
            server.port = self.hub_port
            server.base_url = self.hub_prefix

        self.db.commit()
    
    @gen.coroutine
    def init_users(self):
        """Load users into and from the database"""
        db = self.db
        
        if self.admin_users and not self.authenticator.admin_users:
            self.log.warn(
                "\nJupyterHub.admin_users is deprecated."
                "\nUse Authenticator.admin_users instead."
            )
            self.authenticator.admin_users = self.admin_users
        admin_users = self.authenticator.admin_users
        
        if not admin_users:
            # add current user as admin if there aren't any others
            admins = db.query(orm.User).filter(orm.User.admin==True)
            if admins.first() is None:
                admin_users.add(getuser())
        
        new_users = []

        for name in admin_users:
            # ensure anyone specified as admin in config is admin in db
            user = orm.User.find(db, name)
            if user is None:
                user = orm.User(name=name, admin=True)
                new_users.append(user)
                db.add(user)
            else:
                user.admin = True

        # the admin_users config variable will never be used after this point.
        # only the database values will be referenced.

        whitelist = self.authenticator.whitelist

        if not whitelist:
            self.log.info("Not using whitelist. Any authenticated user will be allowed.")

        # add whitelisted users to the db
        for name in whitelist:
            user = orm.User.find(db, name)
            if user is None:
                user = orm.User(name=name)
                new_users.append(user)
                db.add(user)

        if whitelist:
            # fill the whitelist with any users loaded from the db,
            # so we are consistent in both directions.
            # This lets whitelist be used to set up initial list,
            # but changes to the whitelist can occur in the database,
            # and persist across sessions.
            for user in db.query(orm.User):
                whitelist.add(user.name)

        # The whitelist set and the users in the db are now the same.
        # From this point on, any user changes should be done simultaneously
        # to the whitelist set and user db, unless the whitelist is empty (all users allowed).

        db.commit()
        
        for user in new_users:
            yield gen.maybe_future(self.authenticator.add_user(user))
        db.commit()
    
    @gen.coroutine
    def init_spawners(self):
        db = self.db
        
        user_summaries = ['']
        def _user_summary(user):
            parts = ['{0: >8}'.format(user.name)]
            if user.admin:
                parts.append('admin')
            if user.server:
                parts.append('running at %s' % user.server)
            return ' '.join(parts)
            
        @gen.coroutine
        def user_stopped(user):
            status = yield user.spawner.poll()
            self.log.warn("User %s server stopped with exit code: %s",
                user.name, status,
            )
            yield self.proxy.delete_user(user)
            yield user.stop()
        
        for user in db.query(orm.User):
            if not user.state:
                # without spawner state, server isn't valid
                user.server = None
                user_summaries.append(_user_summary(user))
                continue
            self.log.debug("Loading state for %s from db", user.name)
            user.spawner = spawner = self.spawner_class(
                user=user, hub=self.hub, config=self.config, db=self.db,
            )
            status = yield spawner.poll()
            if status is None:
                self.log.info("%s still running", user.name)
                spawner.add_poll_callback(user_stopped, user)
                spawner.start_polling()
            else:
                # user not running. This is expected if server is None,
                # but indicates the user's server died while the Hub wasn't running
                # if user.server is defined.
                log = self.log.warn if user.server else self.log.debug
                log("%s not running.", user.name)
                user.server = None

            user_summaries.append(_user_summary(user))

        self.log.debug("Loaded users: %s", '\n'.join(user_summaries))
        db.commit()

    def init_proxy(self):
        """Load the Proxy config into the database"""
        self.proxy = self.db.query(orm.Proxy).first()
        if self.proxy is None:
            self.proxy = orm.Proxy(
                public_server=orm.Server(),
                api_server=orm.Server(),
            )
            self.db.add(self.proxy)
            self.db.commit()
        self.proxy.auth_token = self.proxy_auth_token # not persisted
        self.proxy.log = self.log
        self.proxy.public_server.ip = self.ip
        self.proxy.public_server.port = self.port
        self.proxy.api_server.ip = self.proxy_api_ip
        self.proxy.api_server.port = self.proxy_api_port
        self.proxy.api_server.base_url = '/api/routes/'
        self.db.commit()
    
    @gen.coroutine
    def start_proxy(self):
        """Actually start the configurable-http-proxy"""
        # check for proxy
        if self.proxy.public_server.is_up() or self.proxy.api_server.is_up():
            # check for *authenticated* access to the proxy (auth token can change)
            try:
                yield self.proxy.get_routes()
            except (HTTPError, OSError, socket.error) as e:
                if isinstance(e, HTTPError) and e.code == 403:
                    msg = "Did CONFIGPROXY_AUTH_TOKEN change?"
                else:
                    msg = "Is something else using %s?" % self.proxy.public_server.bind_url
                self.log.error("Proxy appears to be running at %s, but I can't access it (%s)\n%s",
                    self.proxy.public_server.bind_url, e, msg)
                self.exit(1)
                return
            else:
                self.log.info("Proxy already running at: %s", self.proxy.public_server.bind_url)
            self.proxy_process = None
            return

        env = os.environ.copy()
        env['CONFIGPROXY_AUTH_TOKEN'] = self.proxy.auth_token
        cmd = self.proxy_cmd + [
            '--ip', self.proxy.public_server.ip,
            '--port', str(self.proxy.public_server.port),
            '--api-ip', self.proxy.api_server.ip,
            '--api-port', str(self.proxy.api_server.port),
            '--default-target', self.hub.server.host,
        ]
        if self.debug_proxy:
            cmd.extend(['--log-level', 'debug'])
        if self.ssl_key:
            cmd.extend(['--ssl-key', self.ssl_key])
        if self.ssl_cert:
            cmd.extend(['--ssl-cert', self.ssl_cert])
        self.log.info("Starting proxy @ %s", self.proxy.public_server.bind_url)
        self.log.debug("Proxy cmd: %s", cmd)
        try:
            self.proxy_process = Popen(cmd, env=env)
        except FileNotFoundError as e:
            self.log.error(
                "Failed to find proxy %r\n"
                "The proxy can be installed with `npm install -g configurable-http-proxy`"
                 % self.proxy_cmd
            )
            self.exit(1)
        def _check():
            status = self.proxy_process.poll()
            if status is not None:
                e = RuntimeError("Proxy failed to start with exit code %i" % status)
                # py2-compatible `raise e from None`
                e.__cause__ = None
                raise e
        
        for server in (self.proxy.public_server, self.proxy.api_server):
            for i in range(10):
                _check()
                try:
                    yield server.wait_up(1)
                except TimeoutError:
                    continue
                else:
                    break
            yield server.wait_up(1)
        self.log.debug("Proxy started and appears to be up")
    
    @gen.coroutine
    def check_proxy(self):
        if self.proxy_process.poll() is None:
            return
        self.log.error("Proxy stopped with exit code %r",
            'unknown' if self.proxy_process is None else self.proxy_process.poll()
        )
        yield self.start_proxy()
        self.log.info("Setting up routes on new proxy")
        yield self.proxy.add_all_users()
        self.log.info("New proxy back up, and good to go")
    
    def init_tornado_settings(self):
        """Set up the tornado settings dict."""
        base_url = self.hub.server.base_url
        jinja_env = Environment(
            loader=FileSystemLoader(self.template_paths),
            **self.jinja_environment_options
        )
        
        login_url = self.authenticator.login_url(base_url)
        logout_url = self.authenticator.logout_url(base_url)
        
        # if running from git, disable caching of require.js
        # otherwise cache based on server start time
        parent = os.path.dirname(os.path.dirname(jupyterhub.__file__))
        if os.path.isdir(os.path.join(parent, '.git')):
            version_hash = ''
        else:
            version_hash = datetime.now().strftime("%Y%m%d%H%M%S")
        
        settings = dict(
            log_function=log_request,
            config=self.config,
            log=self.log,
            db=self.db,
            proxy=self.proxy,
            hub=self.hub,
            admin_users=self.authenticator.admin_users,
            admin_access=self.admin_access,
            authenticator=self.authenticator,
            spawner_class=self.spawner_class,
            base_url=self.base_url,
            cookie_secret=self.cookie_secret,
            cookie_max_age_days=self.cookie_max_age_days,
            login_url=login_url,
            logout_url=logout_url,
            static_path=os.path.join(self.data_files_path, 'static'),
            static_url_prefix=url_path_join(self.hub.server.base_url, 'static/'),
            static_handler_class=CacheControlStaticFilesHandler,
            template_path=self.template_paths,
            jinja2_env=jinja_env,
            version_hash=version_hash,
        )
        # allow configured settings to have priority
        settings.update(self.tornado_settings)
        self.tornado_settings = settings
    
    def init_tornado_application(self):
        """Instantiate the tornado Application object"""
        self.tornado_application = web.Application(self.handlers, **self.tornado_settings)
    
    def write_pid_file(self):
        pid = os.getpid()
        if self.pid_file:
            self.log.debug("Writing PID %i to %s", pid, self.pid_file)
            with open(self.pid_file, 'w') as f:
                f.write('%i' % pid)
    
    @gen.coroutine
    @catch_config_error
    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        if self.generate_config or self.subapp:
            return
        self.load_config_file(self.config_file)
        self.init_logging()
        if 'JupyterHubApp' in self.config:
            self.log.warn("Use JupyterHub in config, not JupyterHubApp. Outdated config:\n%s",
                '\n'.join('JupyterHubApp.{key} = {value!r}'.format(key=key, value=value)
                    for key, value in self.config.JupyterHubApp.items()
                )
            )
            cfg = self.config.copy()
            cfg.JupyterHub.merge(cfg.JupyterHubApp)
            self.update_config(cfg)
        self.write_pid_file()
        self.init_ports()
        self.init_secrets()
        self.init_db()
        self.init_hub()
        self.init_proxy()
        yield self.init_users()
        yield self.init_spawners()
        self.init_handlers()
        self.init_tornado_settings()
        self.init_tornado_application()
    
    @gen.coroutine
    def cleanup(self):
        """Shutdown our various subprocesses and cleanup runtime files."""
        
        futures = []
        if self.cleanup_servers:
            self.log.info("Cleaning up single-user servers...")
            # request (async) process termination
            for user in self.db.query(orm.User):
                if user.spawner is not None:
                    futures.append(user.stop())
        else:
            self.log.info("Leaving single-user servers running")
        
        # clean up proxy while SUS are shutting down
        if self.cleanup_proxy:
            if self.proxy_process:
                self.log.info("Cleaning up proxy[%i]...", self.proxy_process.pid)
                if self.proxy_process.poll() is None:
                    try:
                        self.proxy_process.terminate()
                    except Exception as e:
                        self.log.error("Failed to terminate proxy process: %s", e)
            else:
                self.log.info("I didn't start the proxy, I can't clean it up")
        else:
            self.log.info("Leaving proxy running")
        
        
        # wait for the stop requests to finish:
        for f in futures:
            try:
                yield f
            except Exception as e:
                self.log.error("Failed to stop user: %s", e)
        
        self.db.commit()
        
        if self.pid_file and os.path.exists(self.pid_file):
            self.log.info("Cleaning up PID file %s", self.pid_file)
            os.remove(self.pid_file)
        
        # finally stop the loop once we are all cleaned up
        self.log.info("...done")
    
    def write_config_file(self):
        """Write our default config to a .py config file"""
        if os.path.exists(self.config_file) and not self.answer_yes:
            answer = ''
            def ask():
                prompt = "Overwrite %s with default config? [y/N]" % self.config_file
                try:
                    return input(prompt).lower() or 'n'
                except KeyboardInterrupt:
                    print('') # empty line
                    return 'n'
            answer = ask()
            while not answer.startswith(('y', 'n')):
                print("Please answer 'yes' or 'no'")
                answer = ask()
            if answer.startswith('n'):
                return
        
        config_text = self.generate_config_file()
        if isinstance(config_text, bytes):
            config_text = config_text.decode('utf8')
        print("Writing default config to: %s" % self.config_file)
        with open(self.config_file, mode='w') as f:
            f.write(config_text)
    
    @gen.coroutine
    def update_last_activity(self):
        """Update User.last_activity timestamps from the proxy"""
        routes = yield self.proxy.get_routes()
        for prefix, route in routes.items():
            if 'user' not in route:
                # not a user route, ignore it
                continue
            user = orm.User.find(self.db, route['user'])
            if user is None:
                self.log.warn("Found no user for route: %s", route)
                continue
            try:
                dt = datetime.strptime(route['last_activity'], ISO8601_ms)
            except Exception:
                dt = datetime.strptime(route['last_activity'], ISO8601_s)
            user.last_activity = max(user.last_activity, dt)

        self.db.commit()
        yield self.proxy.check_routes(routes)
    
    @gen.coroutine
    def start(self):
        """Start the whole thing"""
        self.io_loop = loop = IOLoop.current()
        
        if self.subapp:
            self.subapp.start()
            loop.stop()
            return
        
        if self.generate_config:
            self.write_config_file()
            loop.stop()
            return
        
        # start the webserver
        self.http_server = tornado.httpserver.HTTPServer(self.tornado_application, xheaders=True)
        try:
            self.http_server.listen(self.hub_port, address=self.hub_ip)
        except Exception:
            self.log.error("Failed to bind hub to %s", self.hub.server.bind_url)
            raise
        else:
            self.log.info("Hub API listening on %s", self.hub.server.bind_url)
        
        # start the proxy
        try:
            yield self.start_proxy()
        except Exception as e:
            self.log.critical("Failed to start proxy", exc_info=True)
            self.exit(1)
            return
        
        loop.add_callback(self.proxy.add_all_users)
        
        if self.proxy_process:
            # only check / restart the proxy if we started it in the first place.
            # this means a restarted Hub cannot restart a Proxy that its
            # predecessor started.
            pc = PeriodicCallback(self.check_proxy, 1e3 * self.proxy_check_interval)
            pc.start()
        
        if self.last_activity_interval:
            pc = PeriodicCallback(self.update_last_activity, 1e3 * self.last_activity_interval)
            pc.start()

        self.log.info("JupyterHub is now running at %s", self.proxy.public_server.url)
        # register cleanup on both TERM and INT
        atexit.register(self.atexit)
        self.init_signal()

    def init_signal(self):
        signal.signal(signal.SIGTERM, self.sigterm)
    
    def sigterm(self, signum, frame):
        self.log.critical("Received SIGTERM, shutting down")
        self.io_loop.stop()
        self.atexit()
    
    _atexit_ran = False
    def atexit(self):
        """atexit callback"""
        if self._atexit_ran:
            return
        self._atexit_ran = True
        # run the cleanup step (in a new loop, because the interrupted one is unclean)
        IOLoop.clear_current()
        loop = IOLoop()
        loop.make_current()
        loop.run_sync(self.cleanup)
        
    
    def stop(self):
        if not self.io_loop:
            return
        if self.http_server:
            if self.io_loop._running:
                self.io_loop.add_callback(self.http_server.stop)
            else:
                self.http_server.stop()
        self.io_loop.add_callback(self.io_loop.stop)
    
    @gen.coroutine
    def launch_instance_async(self, argv=None):
        try:
            yield self.initialize(argv)
            yield self.start()
        except Exception as e:
            self.log.exception("")
            self.exit(1)
    
    @classmethod
    def launch_instance(cls, argv=None):
        self = cls.instance()
        loop = IOLoop.current()
        loop.add_callback(self.launch_instance_async, argv)
        try:
            loop.start()
        except KeyboardInterrupt:
            print("\nInterrupted")
Example #11
class NotebookNotary(LoggingConfigurable):
    """A class for computing and verifying notebook signatures."""

    data_dir = Unicode(help="""The storage directory for notary secret and database.""").tag(
        config=True
    )

    @default("data_dir")
    def _data_dir_default(self):
        app = None
        try:
            if JupyterApp.initialized():
                app = JupyterApp.instance()
        except MultipleInstanceError:
            pass
        if app is None:
            # create an app, without the global instance
            app = JupyterApp()
            app.initialize(argv=[])
        return app.data_dir

    store_factory = Callable(
        help="""A callable returning the storage backend for notebook signatures.
         The default uses an SQLite database."""
    ).tag(config=True)

    @default("store_factory")
    def _store_factory_default(self):
        def factory():
            if sqlite3 is None:
                self.log.warning("Missing SQLite3, all notebooks will be untrusted!")
                return MemorySignatureStore()
            return SQLiteSignatureStore(self.db_file)

        return factory

    db_file = Unicode(
        help="""The sqlite file in which to store notebook signatures.
        By default, this will be in your Jupyter data directory.
        You can set it to ':memory:' to disable sqlite writing to the filesystem.
        """
    ).tag(config=True)

    @default("db_file")
    def _db_file_default(self):
        if not self.data_dir:
            return ":memory:"
        return os.path.join(self.data_dir, "nbsignatures.db")

    algorithm = Enum(
        algorithms, default_value="sha256", help="""The hashing algorithm used to sign notebooks."""
    ).tag(config=True)

    @observe("algorithm")
    def _algorithm_changed(self, change):
        self.digestmod = getattr(hashlib, change.new)

    digestmod = Any()

    @default("digestmod")
    def _digestmod_default(self):
        return getattr(hashlib, self.algorithm)

    secret_file = Unicode(help="""The file where the secret key is stored.""").tag(config=True)

    @default("secret_file")
    def _secret_file_default(self):
        if not self.data_dir:
            return ""
        return os.path.join(self.data_dir, "notebook_secret")

    secret = Bytes(help="""The secret key with which notebooks are signed.""").tag(config=True)

    @default("secret")
    def _secret_default(self):
        # note: this assumes an Application is running
        if os.path.exists(self.secret_file):
            with open(self.secret_file, "rb") as f:
                return f.read()
        else:
            secret = encodebytes(os.urandom(1024))
            self._write_secret_file(secret)
            return secret

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.store = self.store_factory()

    def _write_secret_file(self, secret):
        """write my secret to my secret_file"""
        self.log.info("Writing notebook-signing key to %s", self.secret_file)
        with open(self.secret_file, "wb") as f:
            f.write(secret)
        try:
            os.chmod(self.secret_file, 0o600)
        except OSError:
            self.log.warning("Could not set permissions on %s", self.secret_file)
        return secret

    def compute_signature(self, nb):
        """Compute a notebook's signature

        by hashing the entire contents of the notebook via HMAC digest.
        """
        hmac = HMAC(self.secret, digestmod=self.digestmod)
        # don't include the previous hash in the content to hash
        with signature_removed(nb):
            # sign the whole thing
            for b in yield_everything(nb):
                hmac.update(b)

        return hmac.hexdigest()

    def check_signature(self, nb):
        """Check a notebook's stored signature

        If a signature is stored in the notebook's metadata,
        a new signature is computed and compared with the stored value.

        Returns True if the signature is found and matches, False otherwise.

        The following conditions must all be met for a notebook to be trusted:
        - a signature is stored in the form 'scheme:hexdigest'
        - the stored scheme matches the requested scheme
        - the requested scheme is available from hashlib
        - the computed hash from notebook_signature matches the stored hash
        """
        if nb.nbformat < 3:
            return False
        signature = self.compute_signature(nb)
        return self.store.check_signature(signature, self.algorithm)

    def sign(self, nb):
        """Sign a notebook, indicating that its output is trusted on this machine

        Stores hash algorithm and hmac digest in a local database of trusted notebooks.
        """
        if nb.nbformat < 3:
            return
        signature = self.compute_signature(nb)
        self.store.store_signature(signature, self.algorithm)

    def unsign(self, nb):
        """Ensure that a notebook is untrusted

        by removing its signature from the trusted database, if present.
        """
        signature = self.compute_signature(nb)
        self.store.remove_signature(signature, self.algorithm)

    def mark_cells(self, nb, trusted):
        """Mark cells as trusted if the notebook's signature can be verified

        Sets ``cell.metadata.trusted = True | False`` on all code cells,
        depending on the *trusted* parameter. This will typically be the return
        value from ``self.check_signature(nb)``.

        This function is the inverse of check_cells
        """
        if nb.nbformat < 3:
            return

        for cell in yield_code_cells(nb):
            cell["metadata"]["trusted"] = trusted

    def _check_cell(self, cell, nbformat_version):
        """Do we trust an individual cell?

        Return True if:

        - cell is explicitly trusted
        - cell has no potentially unsafe rich output

        If a cell has no output, or only simple print statements,
        it will always be trusted.
        """
        # explicitly trusted
        if cell["metadata"].pop("trusted", False):
            return True

        # explicitly safe output
        if nbformat_version >= 4:
            unsafe_output_types = ["execute_result", "display_data"]
            safe_keys = {"output_type", "execution_count", "metadata"}
        else:  # v3
            unsafe_output_types = ["pyout", "display_data"]
            safe_keys = {"output_type", "prompt_number", "metadata"}

        for output in cell["outputs"]:
            output_type = output["output_type"]
            if output_type in unsafe_output_types:
                # if there are any data keys not in the safe whitelist
                output_keys = set(output)
                if output_keys.difference(safe_keys):
                    return False

        return True

    def check_cells(self, nb):
        """Return whether all code cells are trusted.

        A cell is trusted if the 'trusted' field in its metadata is truthy, or
        if it has no potentially unsafe outputs.
        If there are no code cells, return True.

        This function is the inverse of mark_cells.
        """
        if nb.nbformat < 3:
            return False
        trusted = True
        for cell in yield_code_cells(nb):
            # only distrust a cell if it actually has some output to distrust
            if not self._check_cell(cell, nb.nbformat):
                trusted = False

        return trusted
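
A minimal usage sketch of the notary above, assuming this excerpt is nbformat's NotebookNotary (nbformat.sign); the notebook filename is a placeholder:

# Hedged sketch: trust / untrust a notebook with the notary shown above.
import nbformat
from nbformat.sign import NotebookNotary

nb = nbformat.read("analysis.ipynb", as_version=4)  # placeholder path
notary = NotebookNotary()

notary.sign(nb)                    # record the HMAC digest in the trust database
print(notary.check_signature(nb))  # True: stored signature matches
notary.mark_cells(nb, notary.check_signature(nb))  # flag code cells as trusted
notary.unsign(nb)                  # revoke trust again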
Example #12
class SingleUserNotebookAppMixin(Configurable):
    """A Subclass of the regular NotebookApp that is aware of the parent multiuser context."""

    description = dedent("""
    Single-user server for JupyterHub. Extends the Jupyter Notebook server.

    Meant to be invoked by JupyterHub Spawners, not directly.
    """)

    examples = ""
    subcommands = {}
    version = __version__

    # must be set in mixin subclass
    # make_singleuser_app sets these
    # aliases = aliases
    # flags = flags
    # login_handler_class = JupyterHubLoginHandler
    # logout_handler_class = JupyterHubLogoutHandler
    # oauth_callback_handler_class = OAuthCallbackHandler
    # classes = NotebookApp.classes + [HubOAuth]

    # disable single-user app's localhost checking
    allow_remote_access = True

    # don't store cookie secrets
    cookie_secret_file = ''
    # always generate a new cookie secret on launch
    # ensures that each spawn clears any cookies from previous session,
    # triggering OAuth again
    cookie_secret = Bytes()

    def _cookie_secret_default(self):
        return secrets.token_bytes(32)

    user = CUnicode().tag(config=True)
    group = CUnicode().tag(config=True)

    @default('user')
    def _default_user(self):
        return os.environ.get('JUPYTERHUB_USER') or ''

    @default('group')
    def _default_group(self):
        return os.environ.get('JUPYTERHUB_GROUP') or ''

    @observe('user')
    def _user_changed(self, change):
        self.log.name = change.new

    hub_host = Unicode().tag(config=True)

    hub_prefix = Unicode('/hub/').tag(config=True)

    @default('keyfile')
    def _keyfile_default(self):
        return os.environ.get('JUPYTERHUB_SSL_KEYFILE') or ''

    @default('certfile')
    def _certfile_default(self):
        return os.environ.get('JUPYTERHUB_SSL_CERTFILE') or ''

    @default('client_ca')
    def _client_ca_default(self):
        return os.environ.get('JUPYTERHUB_SSL_CLIENT_CA') or ''

    @default('hub_prefix')
    def _hub_prefix_default(self):
        base_url = os.environ.get('JUPYTERHUB_BASE_URL') or '/'
        return base_url + 'hub/'

    hub_api_url = Unicode().tag(config=True)

    @default('hub_api_url')
    def _hub_api_url_default(self):
        return os.environ.get(
            'JUPYTERHUB_API_URL') or 'http://127.0.0.1:8081/hub/api'

    # defaults for some configurables that may come from service env variables:
    @default('base_url')
    def _base_url_default(self):
        return os.environ.get('JUPYTERHUB_SERVICE_PREFIX') or '/'

    # Note: this may be removed if notebook module is >= 5.0.0b1
    @validate('base_url')
    def _validate_base_url(self, proposal):
        """ensure base_url starts and ends with /"""
        value = proposal.value
        if not value.startswith('/'):
            value = '/' + value
        if not value.endswith('/'):
            value = value + '/'
        return value

    @default('port')
    def _port_default(self):
        if os.environ.get('JUPYTERHUB_SERVICE_URL'):
            url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
            if url.port:
                return url.port
            elif url.scheme == 'http':
                return 80
            elif url.scheme == 'https':
                return 443
        return 8888

    @default('ip')
    def _ip_default(self):
        if os.environ.get('JUPYTERHUB_SERVICE_URL'):
            url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
            if url.hostname:
                return url.hostname
        return '127.0.0.1'

    # disable some single-user configurables
    token = ''
    open_browser = False
    quit_button = False
    trust_xheaders = True

    port_retries = (
        0  # disable port-retries, since the Spawner will tell us what port to use
    )

    disable_user_config = Bool(
        False,
        help="""Disable user configuration of single-user server.

        Prevents user-writable files that normally configure the single-user server
        from being loaded, ensuring admins have full control of configuration.
        """,
    ).tag(config=True)

    @validate('notebook_dir')
    def _notebook_dir_validate(self, proposal):
        value = os.path.expanduser(proposal['value'])
        # Strip any trailing slashes
        # *except* if it's root
        _, path = os.path.splitdrive(value)
        if path == os.sep:
            return value
        value = value.rstrip(os.sep)
        if not os.path.isabs(value):
            # If we receive a non-absolute path, make it absolute.
            value = os.path.abspath(value)
        if not os.path.isdir(value):
            raise TraitError("No such notebook dir: %r" % value)
        return value

    @default('log_datefmt')
    def _log_datefmt_default(self):
        """Exclude date from default date format"""
        return "%Y-%m-%d %H:%M:%S"

    @default('log_format')
    def _log_format_default(self):
        """override default log format to include time"""
        return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"

    def _confirm_exit(self):
        # disable the exit confirmation for background notebook processes
        self.io_loop.add_callback_from_signal(self.io_loop.stop)

    def migrate_config(self):
        if self.disable_user_config:
            # disable config-migration when user config is disabled
            return
        else:
            super().migrate_config()

    @property
    def config_file_paths(self):
        path = super().config_file_paths

        if self.disable_user_config:
            # filter out user-writable config dirs if user config is disabled
            path = list(_exclude_home(path))
        return path

    @property
    def nbextensions_path(self):
        path = super().nbextensions_path

        if self.disable_user_config:
            path = list(_exclude_home(path))
        return path

    @validate('static_custom_path')
    def _validate_static_custom_path(self, proposal):
        path = proposal['value']
        if self.disable_user_config:
            path = list(_exclude_home(path))
        return path

    # create dynamic default http client,
    # configured with any relevant ssl config
    hub_http_client = Any()

    @default('hub_http_client')
    def _default_client(self):
        ssl_context = make_ssl_context(self.keyfile,
                                       self.certfile,
                                       cafile=self.client_ca)
        AsyncHTTPClient.configure(None, defaults={"ssl_options": ssl_context})
        return AsyncHTTPClient()

    async def check_hub_version(self):
        """Test a connection to my Hub

        - exit if I can't connect at all
        - check version and warn on sufficient mismatch
        """
        client = self.hub_http_client
        RETRIES = 5
        for i in range(1, RETRIES + 1):
            try:
                resp = await client.fetch(self.hub_api_url)
            except Exception:
                self.log.exception(
                    "Failed to connect to my Hub at %s (attempt %i/%i). Is it running?",
                    self.hub_api_url,
                    i,
                    RETRIES,
                )
                await asyncio.sleep(min(2**i, 16))
            else:
                break
        else:
            self.exit(1)

        hub_version = resp.headers.get('X-JupyterHub-Version')
        _check_version(hub_version, __version__, self.log)

    server_name = Unicode()

    @default('server_name')
    def _server_name_default(self):
        return os.environ.get('JUPYTERHUB_SERVER_NAME', '')

    hub_activity_url = Unicode(
        config=True, help="URL for sending JupyterHub activity updates")

    @default('hub_activity_url')
    def _default_activity_url(self):
        return os.environ.get('JUPYTERHUB_ACTIVITY_URL', '')

    hub_activity_interval = Integer(
        300,
        config=True,
        help="""
        Interval (in seconds) on which to update the Hub
        with our latest activity.
        """,
    )

    @default('hub_activity_interval')
    def _default_activity_interval(self):
        env_value = os.environ.get('JUPYTERHUB_ACTIVITY_INTERVAL')
        if env_value:
            return int(env_value)
        else:
            return 300

    _last_activity_sent = Any(allow_none=True)

    async def notify_activity(self):
        """Notify jupyterhub of activity"""
        client = self.hub_http_client
        last_activity = self.web_app.last_activity()
        if not last_activity:
            self.log.debug("No activity to send to the Hub")
            return
        # protect against mixed timezone comparisons
        # (the early return above guarantees last_activity is set)
        if not last_activity.tzinfo:
            # assume naive timestamps are utc
            self.log.warning("last activity is using naive timestamps")
            last_activity = last_activity.replace(tzinfo=timezone.utc)

        if self._last_activity_sent and last_activity < self._last_activity_sent:
            self.log.debug("No activity since %s", self._last_activity_sent)
            return

        last_activity_timestamp = isoformat(last_activity)

        async def notify():
            self.log.debug("Notifying Hub of activity %s",
                           last_activity_timestamp)
            req = HTTPRequest(
                url=self.hub_activity_url,
                method='POST',
                headers={
                    "Authorization":
                    "token {}".format(self.hub_auth.api_token),
                    "Content-Type": "application/json",
                },
                body=json.dumps({
                    'servers': {
                        self.server_name: {
                            'last_activity': last_activity_timestamp
                        }
                    },
                    'last_activity': last_activity_timestamp,
                }),
            )
            try:
                await client.fetch(req)
            except Exception:
                self.log.exception("Error notifying Hub of activity")
                return False
            else:
                return True

        await exponential_backoff(
            notify,
            fail_message="Failed to notify Hub of activity",
            start_wait=1,
            max_wait=15,
            timeout=60,
        )
        self._last_activity_sent = last_activity

    async def keep_activity_updated(self):
        if not self.hub_activity_url or not self.hub_activity_interval:
            self.log.warning("Activity events disabled")
            return
        self.log.info("Updating Hub with activity every %s seconds",
                      self.hub_activity_interval)
        while True:
            try:
                await self.notify_activity()
            except Exception:
                self.log.exception("Error notifying Hub of activity")
            # add 20% jitter to the interval to avoid alignment
            # of lots of requests from user servers
            t = self.hub_activity_interval * (1 + 0.2 *
                                              (random.random() - 0.5))
            await asyncio.sleep(t)

    def initialize(self, argv=None):
        # disable trash by default
        # this can be re-enabled by config
        self.config.FileContentsManager.delete_to_trash = False
        return super().initialize(argv)

    def start(self):
        self.log.info("Starting jupyterhub-singleuser server version %s",
                      __version__)
        # start by hitting Hub to check version
        ioloop.IOLoop.current().run_sync(self.check_hub_version)
        ioloop.IOLoop.current().add_callback(self.keep_activity_updated)
        super().start()

    def init_hub_auth(self):
        api_token = None
        if os.getenv('JPY_API_TOKEN'):
            # Deprecated env variable (as of 0.7.2)
            api_token = os.environ['JPY_API_TOKEN']
        if os.getenv('JUPYTERHUB_API_TOKEN'):
            api_token = os.environ['JUPYTERHUB_API_TOKEN']

        if not api_token:
            self.exit(
                "JUPYTERHUB_API_TOKEN env is required to run jupyterhub-singleuser. Did you launch it manually?"
            )
        self.hub_auth = HubOAuth(
            parent=self,
            api_token=api_token,
            api_url=self.hub_api_url,
            hub_prefix=self.hub_prefix,
            base_url=self.base_url,
            keyfile=self.keyfile,
            certfile=self.certfile,
            client_ca=self.client_ca,
        )
        # smoke check
        if not self.hub_auth.oauth_client_id:
            raise ValueError("Missing OAuth client ID")

    def init_webapp(self):
        # load the hub-related settings into the tornado settings dict
        self.init_hub_auth()
        s = self.tornado_settings
        s['log_function'] = log_request
        s['user'] = self.user
        s['group'] = self.group
        s['hub_prefix'] = self.hub_prefix
        s['hub_host'] = self.hub_host
        s['hub_auth'] = self.hub_auth
        csp_report_uri = s['csp_report_uri'] = self.hub_host + url_path_join(
            self.hub_prefix, 'security/csp-report')
        headers = s.setdefault('headers', {})
        headers['X-JupyterHub-Version'] = __version__
        # set CSP header directly to workaround bugs in jupyter/notebook 5.0
        headers.setdefault(
            'Content-Security-Policy',
            ';'.join(
                ["frame-ancestors 'self'", "report-uri " + csp_report_uri]),
        )
        super().init_webapp()

        # add OAuth callback
        self.web_app.add_handlers(
            r".*$",
            [(
                urlparse(self.hub_auth.oauth_redirect_uri).path,
                self.oauth_callback_handler_class,
            )],
        )

        # apply X-JupyterHub-Version to *all* request handlers (even redirects)
        self.patch_default_headers()
        self.patch_templates()

    def patch_default_headers(self):
        if hasattr(RequestHandler, '_orig_set_default_headers'):
            return
        RequestHandler._orig_set_default_headers = RequestHandler.set_default_headers

        def set_jupyterhub_header(self):
            self._orig_set_default_headers()
            self.set_header('X-JupyterHub-Version', __version__)

        RequestHandler.set_default_headers = set_jupyterhub_header

    def patch_templates(self):
        """Patch page templates to add Hub-related buttons"""

        self.jinja_template_vars['logo_url'] = self.hub_host + url_path_join(
            self.hub_prefix, 'logo')
        self.jinja_template_vars['hub_host'] = self.hub_host
        self.jinja_template_vars['hub_prefix'] = self.hub_prefix
        env = self.web_app.settings['jinja2_env']

        env.globals['hub_control_panel_url'] = self.hub_host + url_path_join(
            self.hub_prefix, 'home')

        # patch jinja env loading to modify page template
        def get_page(name):
            if name == 'page.html':
                return page_template

        orig_loader = env.loader
        env.loader = ChoiceLoader([FunctionLoader(get_page), orig_loader])
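
The loader chaining at the end of patch_templates is plain jinja2 API; a standalone sketch with made-up template contents:

# Sketch of the loader-chaining trick used above: a FunctionLoader intercepts
# one template name, everything else falls through to the original loader.
from jinja2 import ChoiceLoader, DictLoader, Environment, FunctionLoader

env = Environment(loader=DictLoader({'page.html': 'original page'}))
page_template = 'patched page with Hub buttons'  # stand-in for the real template

def get_page(name):
    # returning None makes jinja2 try the next loader in the chain
    if name == 'page.html':
        return page_template

env.loader = ChoiceLoader([FunctionLoader(get_page), env.loader])
assert env.get_template('page.html').render() == 'patched page with Hub buttons'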
Example #13
class Image(DOMWidget, ValueWidget, CoreWidget):
    """Displays an image as a widget.

    The `value` of this widget accepts a byte string.  The byte string is the
    raw image data that you want the browser to display.  You can explicitly
    define the format of the byte string using the `format` trait (which
    defaults to "png").

    If you pass `"url"` to the `"format"` trait, `value` will be interpreted
    as a URL, stored as UTF-8-encoded bytes.
    """
    _view_name = Unicode('ImageView').tag(sync=True)
    _model_name = Unicode('ImageModel').tag(sync=True)

    # Define the custom state properties to sync with the front-end
    format = Unicode('png', help="The format of the image.").tag(sync=True)
    width = CUnicode(help="Width of the image in pixels.").tag(sync=True)
    height = CUnicode(help="Height of the image in pixels.").tag(sync=True)
    value = Bytes(help="The image data as a byte string.").tag(sync=True)

    @classmethod
    def from_file(cls, filename, **kwargs):
        """
        Create an :class:`Image` from a local file.

        Parameters
        ----------
        filename: str
            The location of a file to read into the value from disk.

        **kwargs:
            The keyword arguments for `Image`

        Returns an `Image` with the value set from the filename.
        """
        value = cls._load_file_value(filename)

        if 'format' not in kwargs:
            img_format = cls._guess_format(filename)
            if img_format is not None:
                kwargs['format'] = img_format

        return cls(value=value, **kwargs)

    @classmethod
    def from_url(cls, url, **kwargs):
        """
        Create an :class:`Image` from a URL.

        :code:`Image.from_url(url)` is equivalent to:

        .. code-block:: python

            img = Image(value=url, format='url')

        But both unicode and bytes arguments are allowed for ``url``.

        Parameters
        ----------
        url: [str, bytes]
            The location of a URL to load.
        """
        if isinstance(url, _text_type):
            # If unicode (str in Python 3), it needs to be encoded to bytes
            url = url.encode('utf-8')

        return cls(value=url, format='url', **kwargs)

    def set_value_from_file(self, filename):
        """
        Convenience method for reading a file into `value`.

        Parameters
        ----------
        filename: str
            The location of a file to read into value from disk.
        """
        value = self._load_file_value(filename)

        self.value = value

    @classmethod
    def _load_file_value(cls, filename):
        if getattr(filename, 'read', None) is not None:
            return filename.read()
        else:
            with open(filename, 'rb') as f:
                return f.read()

    @classmethod
    def _guess_format(cls, filename):
        # file objects may have a .name parameter
        name = getattr(filename, 'name', None)
        name = name or filename

        try:
            mtype, _ = mimetypes.guess_type(name)
            if not mtype.startswith('image/'):
                return None

            return mtype[len('image/'):]
        except Exception:
            return None
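
Typical ways to construct the Image widget above (the file path and URL are placeholders):

# Hedged sketch: three ways to populate the Image widget defined above.
from ipywidgets import Image

img = Image.from_file('logo.png')          # format guessed from the extension
img2 = Image.from_url('https://example.com/logo.png')  # value holds URL bytes
img3 = Image(width='300')                  # CUnicode coerces numbers to text
img3.set_value_from_file('logo.png')       # read raw bytes into `value`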
Example #14
class SingleUserNotebookApp(NotebookApp):
    """A Subclass of the regular NotebookApp that is aware of the parent multiuser context."""
    description = dedent("""
    Single-user server for JupyterHub. Extends the Jupyter Notebook server.

    Meant to be invoked by JupyterHub Spawners, and not directly.
    """)

    examples = ""
    subcommands = {}
    version = __version__
    classes = NotebookApp.classes + [HubOAuth]

    # disable single-user app's localhost checking
    allow_remote_access = True

    # don't store cookie secrets
    cookie_secret_file = ''
    # always generate a new cookie secret on launch
    # ensures that each spawn clears any cookies from previous session,
    # triggering OAuth again
    cookie_secret = Bytes()

    def _cookie_secret_default(self):
        return os.urandom(32)

    user = CUnicode().tag(config=True)
    group = CUnicode().tag(config=True)

    @default('user')
    def _default_user(self):
        return os.environ.get('JUPYTERHUB_USER') or ''

    @default('group')
    def _default_group(self):
        return os.environ.get('JUPYTERHUB_GROUP') or ''

    @observe('user')
    def _user_changed(self, change):
        self.log.name = change.new

    hub_host = Unicode().tag(config=True)

    hub_prefix = Unicode('/hub/').tag(config=True)

    @default('keyfile')
    def _keyfile_default(self):
        return os.environ.get('JUPYTERHUB_SSL_KEYFILE') or ''

    @default('certfile')
    def _certfile_default(self):
        return os.environ.get('JUPYTERHUB_SSL_CERTFILE') or ''

    @default('client_ca')
    def _client_ca_default(self):
        return os.environ.get('JUPYTERHUB_SSL_CLIENT_CA') or ''

    @default('hub_prefix')
    def _hub_prefix_default(self):
        base_url = os.environ.get('JUPYTERHUB_BASE_URL') or '/'
        return base_url + 'hub/'

    hub_api_url = Unicode().tag(config=True)

    @default('hub_api_url')
    def _hub_api_url_default(self):
        return os.environ.get(
            'JUPYTERHUB_API_URL') or 'http://127.0.0.1:8081/hub/api'

    # defaults for some configurables that may come from service env variables:
    @default('base_url')
    def _base_url_default(self):
        return os.environ.get('JUPYTERHUB_SERVICE_PREFIX') or '/'

    # Note: this may be removed if notebook module is >= 5.0.0b1
    @validate('base_url')
    def _validate_base_url(self, proposal):
        """ensure base_url starts and ends with /"""
        value = proposal.value
        if not value.startswith('/'):
            value = '/' + value
        if not value.endswith('/'):
            value = value + '/'
        return value

    @default('port')
    def _port_default(self):
        if os.environ.get('JUPYTERHUB_SERVICE_URL'):
            url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
            if url.port:
                return url.port
            elif url.scheme == 'http':
                return 80
            elif url.scheme == 'https':
                return 443
        return 8888

    @default('ip')
    def _ip_default(self):
        if os.environ.get('JUPYTERHUB_SERVICE_URL'):
            url = urlparse(os.environ['JUPYTERHUB_SERVICE_URL'])
            if url.hostname:
                return url.hostname
        return '127.0.0.1'

    aliases = aliases
    flags = flags

    # disable some single-user configurables
    token = ''
    open_browser = False
    quit_button = False
    trust_xheaders = True
    login_handler_class = JupyterHubLoginHandler
    logout_handler_class = JupyterHubLogoutHandler
    port_retries = 0  # disable port-retries, since the Spawner will tell us what port to use

    disable_user_config = Bool(
        False,
        help="""Disable user configuration of single-user server.

        Prevents user-writable files that normally configure the single-user server
        from being loaded, ensuring admins have full control of configuration.
        """).tag(config=True)

    @validate('notebook_dir')
    def _notebook_dir_validate(self, proposal):
        value = os.path.expanduser(proposal['value'])
        # Strip any trailing slashes
        # *except* if it's root
        _, path = os.path.splitdrive(value)
        if path == os.sep:
            return value
        value = value.rstrip(os.sep)
        if not os.path.isabs(value):
            # If we receive a non-absolute path, make it absolute.
            value = os.path.abspath(value)
        if not os.path.isdir(value):
            raise TraitError("No such notebook dir: %r" % value)
        return value

    @default('log_datefmt')
    def _log_datefmt_default(self):
        """Exclude date from default date format"""
        return "%Y-%m-%d %H:%M:%S"

    @default('log_format')
    def _log_format_default(self):
        """override default log format to include time"""
        return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"

    def _confirm_exit(self):
        # disable the exit confirmation for background notebook processes
        self.io_loop.add_callback_from_signal(self.io_loop.stop)

    def migrate_config(self):
        if self.disable_user_config:
            # disable config-migration when user config is disabled
            return
        else:
            super(SingleUserNotebookApp, self).migrate_config()

    @property
    def config_file_paths(self):
        path = super(SingleUserNotebookApp, self).config_file_paths

        if self.disable_user_config:
            # filter out user-writable config dirs if user config is disabled
            path = list(_exclude_home(path))
        return path

    @property
    def nbextensions_path(self):
        path = super(SingleUserNotebookApp, self).nbextensions_path

        if self.disable_user_config:
            path = list(_exclude_home(path))
        return path

    @validate('static_custom_path')
    def _validate_static_custom_path(self, proposal):
        path = proposal['value']
        if self.disable_user_config:
            path = list(_exclude_home(path))
        return path

    async def check_hub_version(self):
        """Test a connection to my Hub

        - exit if I can't connect at all
        - check version and warn on sufficient mismatch
        """
        ssl_context = make_ssl_context(
            self.keyfile,
            self.certfile,
            cafile=self.client_ca,
        )
        AsyncHTTPClient.configure(None, defaults={"ssl_options": ssl_context})

        client = AsyncHTTPClient()
        RETRIES = 5
        for i in range(1, RETRIES + 1):
            try:
                resp = await client.fetch(self.hub_api_url)
            except Exception:
                self.log.exception(
                    "Failed to connect to my Hub at %s (attempt %i/%i). Is it running?",
                    self.hub_api_url, i, RETRIES)
                await gen.sleep(min(2**i, 16))
            else:
                break
        else:
            self.exit(1)

        hub_version = resp.headers.get('X-JupyterHub-Version')
        _check_version(hub_version, __version__, self.log)

    def initialize(self, argv=None):
        # disable trash by default
        # this can be re-enabled by config
        self.config.FileContentsManager.delete_to_trash = False
        return super().initialize(argv)

    def start(self):
        self.log.info("Starting jupyterhub-singleuser server version %s",
                      __version__)
        # start by hitting Hub to check version
        ioloop.IOLoop.current().run_sync(self.check_hub_version)
        super(SingleUserNotebookApp, self).start()

    def init_hub_auth(self):
        api_token = None
        if os.getenv('JPY_API_TOKEN'):
            # Deprecated env variable (as of 0.7.2)
            api_token = os.environ['JPY_API_TOKEN']
        if os.getenv('JUPYTERHUB_API_TOKEN'):
            api_token = os.environ['JUPYTERHUB_API_TOKEN']

        if not api_token:
            self.exit(
                "JUPYTERHUB_API_TOKEN env is required to run jupyterhub-singleuser. Did you launch it manually?"
            )
        self.hub_auth = HubOAuth(
            parent=self,
            api_token=api_token,
            api_url=self.hub_api_url,
            hub_prefix=self.hub_prefix,
            base_url=self.base_url,
            keyfile=self.keyfile,
            certfile=self.certfile,
            client_ca=self.client_ca,
        )
        # smoke check
        if not self.hub_auth.oauth_client_id:
            raise ValueError("Missing OAuth client ID")

    def init_webapp(self):
        # load the hub-related settings into the tornado settings dict
        self.init_hub_auth()
        s = self.tornado_settings
        s['log_function'] = log_request
        s['user'] = self.user
        s['group'] = self.group
        s['hub_prefix'] = self.hub_prefix
        s['hub_host'] = self.hub_host
        s['hub_auth'] = self.hub_auth
        csp_report_uri = s['csp_report_uri'] = self.hub_host + url_path_join(
            self.hub_prefix, 'security/csp-report')
        headers = s.setdefault('headers', {})
        headers['X-JupyterHub-Version'] = __version__
        # set CSP header directly to workaround bugs in jupyter/notebook 5.0
        headers.setdefault(
            'Content-Security-Policy', ';'.join([
                "frame-ancestors 'self'",
                "report-uri " + csp_report_uri,
            ]))
        super(SingleUserNotebookApp, self).init_webapp()

        # add OAuth callback
        self.web_app.add_handlers(r".*$", [(urlparse(
            self.hub_auth.oauth_redirect_uri).path, OAuthCallbackHandler)])

        # apply X-JupyterHub-Version to *all* request handlers (even redirects)
        self.patch_default_headers()
        self.patch_templates()

    def patch_default_headers(self):
        if hasattr(RequestHandler, '_orig_set_default_headers'):
            return
        RequestHandler._orig_set_default_headers = RequestHandler.set_default_headers

        def set_jupyterhub_header(self):
            self._orig_set_default_headers()
            self.set_header('X-JupyterHub-Version', __version__)

        RequestHandler.set_default_headers = set_jupyterhub_header

    def patch_templates(self):
        """Patch page templates to add Hub-related buttons"""

        self.jinja_template_vars['logo_url'] = self.hub_host + url_path_join(
            self.hub_prefix, 'logo')
        self.jinja_template_vars['hub_host'] = self.hub_host
        self.jinja_template_vars['hub_prefix'] = self.hub_prefix
        env = self.web_app.settings['jinja2_env']

        env.globals['hub_control_panel_url'] = \
            self.hub_host + url_path_join(self.hub_prefix, 'home')

        # patch jinja env loading to modify page template
        def get_page(name):
            if name == 'page.html':
                return page_template

        orig_loader = env.loader
        env.loader = ChoiceLoader([
            FunctionLoader(get_page),
            orig_loader,
        ])
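
The ip/port defaults above just parse the Spawner-provided JUPYTERHUB_SERVICE_URL; a standalone sketch of that logic (the URL value is illustrative):

# Mirror of _ip_default/_port_default using only the standard library.
from urllib.parse import urlparse

url = urlparse('http://127.0.0.1:53251/user/alice/')  # example value only
ip = url.hostname or '127.0.0.1'
port = url.port or {'http': 80, 'https': 443}.get(url.scheme, 8888)
assert (ip, port) == ('127.0.0.1', 53251)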
Example #15
class NlDownloadLink(DOMWidget, CoreWidget):
    """A widget to download content as file with filename.


    Parameters
    ----------
    content: str
        content of the file as bytes
    filename: str
        file name
    mimetype: str
        text/csv by default
    description: str
        description for link
    tooltip: str
        tooltip to display when link hovered
    disabled: bool
        boolean value to indicate if the link is disabled
    """

    _view_name = Unicode("DownloadLinkView").tag(sync=True)
    _model_name = Unicode('DownloadLinkModel').tag(sync=True)
    _view_module = Unicode("neurolang-ipywidgets").tag(sync=True)
    _model_module = Unicode('neurolang-ipywidgets').tag(sync=True)

    _view_module_version = Unicode("0.1.0").tag(sync=True)
    _model_module_version = Unicode('^0.1.0').tag(sync=True)

    # Widget specific properties
    content = Bytes().tag(sync=True, **content_serialization)
    mimetype = Unicode("text/csv").tag(sync=True)
    filename = Unicode().tag(sync=True)
    description = Unicode().tag(sync=True)
    tooltip = Unicode("Download").tag(sync=True)
    disabled = Bool(False).tag(sync=True)

    # below lines are copied from button widget to handle click on the link

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._click_handlers = CallbackDispatcher()
        self.on_msg(self._handle_button_msg)

    # this is necessary when the data is big and its content should not be set when the widget is initialized
    def on_click(self, callback, remove=False):
        """Register a callback to execute when the button is clicked.
        The callback will be called with one argument, the clicked button
        widget instance.
        Parameters
        ----------
        remove: bool (optional)
            Set to true to remove the callback from the list of callbacks.
        """
        self._click_handlers.register_callback(callback, remove=remove)

    def click(self):
        """Programmatically trigger a click event.
        This will call the callbacks registered to the clicked button
        widget instance.
        """
        self._click_handlers(self)

    def _handle_button_msg(self, _, content, buffers):
        """Handle a msg from the front-end.
        Parameters
        ----------
        content: dict
            Content of the msg.
        """
        if content.get('event', '') == 'click':
            self.click()
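
A short usage sketch for the widget above: defer generating the payload until the link is clicked, which is what the on_click hook exists for (the CSV content is made up):

# Hedged sketch: fill `content` lazily, on click.
link = NlDownloadLink(filename='results.csv', description='Download results')

def fill_content(widget):
    widget.content = b'a,b\n1,2\n'  # generate the bytes only on demand

link.on_click(fill_content)
link  # display in a notebook; the front-end click triggers fill_content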
Example #16
class IPEngine(BaseParallelApplication):

    name = 'ipengine'
    description = _description
    examples = _examples
    classes = List([ZMQInteractiveShell, ProfileDir, Session, Kernel])
    _deprecated_classes = ["EngineFactory", "IPEngineApp"]

    enable_nanny = Bool(
        True,
        config=True,
        help="""Enable the nanny process.

    The nanny process enables remote signaling of single engines
    and more responsive notification of engine shutdown.

    .. versionadded:: 7.0

    """,
    )

    startup_script = Unicode('',
                             config=True,
                             help='specify a script to be run at startup')
    startup_command = Unicode('',
                              config=True,
                              help='specify a command to be run at startup')

    url_file = Unicode(
        '',
        config=True,
        help=
        """The full location of the file containing the connection information for
        the controller. If this is not given, the file must be in the
        security directory of the cluster directory.  This location is
        resolved using the `profile` or `profile_dir` options.""",
    )
    wait_for_url_file = Float(
        10,
        config=True,
        help="""The maximum number of seconds to wait for url_file to exist.
        This is useful for batch-systems and shared-filesystems where the
        controller and engine are started at the same time and it
        may take a moment for the controller to write the connector files.""",
    )

    url_file_name = Unicode('ipcontroller-engine.json', config=True)

    connection_info_env = Unicode()

    @default("connection_info_env")
    def _default_connection_file_env(self):
        return os.environ.get("IPP_CONNECTION_INFO", "")

    @observe('cluster_id')
    def _cluster_id_changed(self, change):
        if change['new']:
            base = 'ipcontroller-{}'.format(change['new'])
        else:
            base = 'ipcontroller'
        self.url_file_name = "%s-engine.json" % base

    log_url = Unicode(
        '',
        config=True,
        help="""The URL for the iploggerapp instance, for forwarding
        logging to a central location.""",
    )

    registration_url = Unicode(
        config=True,
        help="""Override the registration URL""",
    )
    out_stream_factory = Type(
        'ipykernel.iostream.OutStream',
        config=True,
        help="""The OutStream for handling stdout/err.
        Typically 'ipykernel.iostream.OutStream'""",
    )
    display_hook_factory = Type(
        'ipykernel.displayhook.ZMQDisplayHook',
        config=True,
        help="""The class for handling displayhook.
        Typically 'ipykernel.displayhook.ZMQDisplayHook'""",
    )
    location = Unicode(
        config=True,
        help="""The location (an IP address) of the controller.  This is
        used for disambiguating URLs, to determine whether
        loopback should be used to connect or the public address.""",
    )
    timeout = Float(
        5.0,
        config=True,
        help="""The time (in seconds) to wait for the Controller to respond
        to registration requests before giving up.""",
    )
    max_heartbeat_misses = Integer(
        50,
        config=True,
        help="""The maximum number of times a check for the heartbeat ping of a
        controller can be missed before shutting down the engine.

        If set to 0, the check is disabled.""",
    )
    sshserver = Unicode(
        config=True,
        help=
        """The SSH server to use for tunneling connections to the Controller.""",
    )
    sshkey = Unicode(
        config=True,
        help=
        """The SSH private key file to use when tunneling connections to the Controller.""",
    )
    paramiko = Bool(
        sys.platform == 'win32',
        config=True,
        help="""Whether to use paramiko instead of openssh for tunnels.""",
    )

    use_mpi = Bool(
        False,
        config=True,
        help="""Enable MPI integration.

        If set, MPI rank will be requested for my rank,
        and additionally `mpi_init` will be executed in the interactive shell.
        """,
    )
    init_mpi = Unicode(
        DEFAULT_MPI_INIT,
        config=True,
        help="""Code to execute in the user namespace when initializing MPI""",
    )
    mpi_registration_delay = Float(
        0.02,
        config=True,
        help="""Per-engine delay for mpiexec-launched engines

        avoids flooding the controller with registrations,
        which can stall under heavy load.

        Default: .02 (50 engines/sec, or 3000 engines/minute)
        """,
    )

    # not configurable:
    user_ns = Dict()
    id = Integer(
        None,
        allow_none=True,
        config=True,
        help="""Request this engine ID.

        If run in MPI, will use the MPI rank.
        Otherwise, let the Hub decide what our rank should be.
        """,
    )

    @default('id')
    def _id_default(self):
        if not self.use_mpi:
            return None
        from mpi4py import MPI

        if MPI.COMM_WORLD.size > 1:
            self.log.debug("MPI rank = %i", MPI.COMM_WORLD.rank)
            return MPI.COMM_WORLD.rank

    registrar = Instance('zmq.eventloop.zmqstream.ZMQStream', allow_none=True)
    kernel = Instance(Kernel, allow_none=True)
    hb_check_period = Integer()

    # States for the heartbeat monitoring
    # Initial values must satisfy "monitored > pinged == False", so that no
    # "missed" ping is reported during the first check. Both are floats,
    # matching time.time().
    _hb_last_pinged = 0.0
    _hb_last_monitored = 0.0
    _hb_missed_beats = 0
    # The zmq Stream which receives the pings from the Heart
    _hb_listener = None

    bident = Bytes()
    ident = Unicode()

    @default("ident")
    def _default_ident(self):
        return self.session.session

    @default("bident")
    def _default_bident(self):
        return self.ident.encode("utf8")

    @observe("ident")
    def _ident_changed(self, change):
        self.bident = self._default_bident()

    using_ssh = Bool(False)

    context = Instance(zmq.Context)

    @default("context")
    def _default_context(self):
        return zmq.Context.instance()

    # an IPKernelApp instance, used to setup listening for shell frontends
    kernel_app = Instance(IPKernelApp, allow_none=True)

    aliases = Dict(aliases)
    flags = Dict(flags)

    curve_serverkey = Bytes(
        config=True, help="The controller's public key for CURVE security")
    curve_secretkey = Bytes(
        config=True,
        help="""The engine's secret key for CURVE security.
        Usually autogenerated on launch.""",
    )
    curve_publickey = Bytes(
        config=True,
        help="""The engine's public key for CURVE security.
        Usually autogenerated on launch.""",
    )

    @default("curve_serverkey")
    def _default_curve_serverkey(self):
        return os.environ.get("IPP_CURVE_SERVERKEY", "").encode("ascii")

    @default("curve_secretkey")
    def _default_curve_secretkey(self):
        return os.environ.get("IPP_CURVE_SECRETKEY", "").encode("ascii")

    @default("curve_publickey")
    def _default_curve_publickey(self):
        return os.environ.get("IPP_CURVE_PUBLICKEY", "").encode("ascii")

    @validate("curve_publickey", "curve_secretkey", "curve_serverkey")
    def _cast_bytes(self, proposal):
        if isinstance(proposal.value, str):
            return proposal.value.encode("ascii")
        return proposal.value

    def _ensure_curve_keypair(self):
        if not self.curve_secretkey or not self.curve_publickey:
            self.log.info("Generating new CURVE credentials")
            self.curve_publickey, self.curve_secretkey = zmq.curve_keypair()
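
A standalone sketch of the CurveZMQ client setup the engine performs (pyzmq API; requires libzmq built with curve support, and the endpoint here is illustrative):

# Hedged sketch: generate a keypair as _ensure_curve_keypair does, then point a
# client socket at the server's public key before connecting.
import zmq

public, secret = zmq.curve_keypair()    # engine's keypair
server_public, _ = zmq.curve_keypair()  # stand-in for the controller's key

sock = zmq.Context.instance().socket(zmq.DEALER)
sock.setsockopt(zmq.CURVE_SERVERKEY, server_public)
sock.setsockopt(zmq.CURVE_SECRETKEY, secret)
sock.setsockopt(zmq.CURVE_PUBLICKEY, public)
sock.connect('tcp://127.0.0.1:9999')    # illustrative endpoint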

    def find_connection_file(self):
        """Set the url file.

        Here we don't check whether it exists or is valid; that
        is handled by the connection logic.
        """
        # Find the actual ipcontroller-engine.json connection file
        if not self.url_file:
            self.url_file = os.path.join(self.profile_dir.security_dir,
                                         self.url_file_name)

    def load_connection_file(self):
        """load config from a JSON connector file,
        at a *lower* priority than command-line/config files.

        Same content can be specified in $IPP_CONNECTION_INFO env
        """
        config = self.config

        if self.connection_info_env:
            self.log.info("Loading connection info from $IPP_CONNECTION_INFO")
            d = json.loads(self.connection_info_env)
        else:
            self.log.info("Loading connection file %r", self.url_file)
            with open(self.url_file) as f:
                d = json.load(f)

        # allow hand-override of location for disambiguation
        # and ssh-server
        if 'IPEngine.location' not in self.cli_config:
            self.location = d['location']
        if 'ssh' in d and not self.sshserver:
            self.sshserver = d.get("ssh")

        proto, ip = d['interface'].split('://')
        ip = disambiguate_ip_address(ip, self.location)
        d['interface'] = f'{proto}://{ip}'

        if d.get('curve_serverkey'):
            # connection file takes precedence over env, if present and defined
            self.curve_serverkey = d['curve_serverkey'].encode('ascii')
        if self.curve_serverkey:
            self.log.info("Using CurveZMQ security")
            self._ensure_curve_keypair()
        else:
            self.log.warning("Not using CurveZMQ security")

        # DO NOT allow override of basic URLs, serialization, or key
        # JSON file takes top priority there
        if d.get('key') or 'key' not in config.Session:
            config.Session.key = d.get('key', '').encode('utf8')
        config.Session.signature_scheme = d['signature_scheme']

        self.registration_url = f"{d['interface']}:{d['registration']}"

        config.Session.packer = d['pack']
        config.Session.unpacker = d['unpack']
        self.session = Session(parent=self)

        self.log.debug("Config changed:")
        self.log.debug("%r", config)
        self.connection_info = d
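
For reference, the fields load_connection_file consumes, as a sketch (values are illustrative; the real file is written by ipcontroller, which also records the per-channel ports read later, such as mux, task, control, iopub, hb_ping, hb_pong, and broadcast):

connection_info = {
    "location": "10.0.0.5",          # controller IP, used for disambiguation
    "interface": "tcp://10.0.0.5",   # proto://ip; channel ports are appended
    "registration": 51720,           # -> registration_url
    "ssh": "",                       # optional ssh server for tunneling
    "curve_serverkey": "",           # set when CurveZMQ security is enabled
    "key": "...",                    # Session HMAC key (elided)
    "signature_scheme": "hmac-sha256",
    "pack": "json",
    "unpack": "json",
}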

    def bind_kernel(self, **kwargs):
        """Promote engine to listening kernel, accessible to frontends."""
        if self.kernel_app is not None:
            return

        self.log.info(
            "Opening ports for direct connections as an IPython kernel")
        if self.curve_serverkey:
            self.log.warning("Bound kernel does not support CURVE security")

        kernel = self.kernel

        kwargs.setdefault('config', self.config)
        kwargs.setdefault('log', self.log)
        kwargs.setdefault('profile_dir', self.profile_dir)
        kwargs.setdefault('session', self.session)

        app = self.kernel_app = IPKernelApp(**kwargs)

        # allow IPKernelApp.instance():
        IPKernelApp._instance = app

        app.init_connection_file()
        # relevant contents of init_sockets:

        app.shell_port = app._bind_socket(kernel.shell_streams[0],
                                          app.shell_port)
        app.log.debug("shell ROUTER Channel on port: %i", app.shell_port)

        iopub_socket = kernel.iopub_socket
        # ipykernel 4.3 iopub_socket is an IOThread wrapper:
        if hasattr(iopub_socket, 'socket'):
            iopub_socket = iopub_socket.socket

        app.iopub_port = app._bind_socket(iopub_socket, app.iopub_port)
        app.log.debug("iopub PUB Channel on port: %i", app.iopub_port)

        kernel.stdin_socket = self.context.socket(zmq.ROUTER)
        app.stdin_port = app._bind_socket(kernel.stdin_socket, app.stdin_port)
        app.log.debug("stdin ROUTER Channel on port: %i", app.stdin_port)

        # start the heartbeat, and log connection info:

        app.init_heartbeat()

        app.log_connection_info()
        app.connection_dir = self.profile_dir.security_dir
        app.write_connection_file()

    @property
    def tunnel_mod(self):
        from zmq.ssh import tunnel

        return tunnel

    def init_connector(self):
        """construct connection function, which handles tunnels."""
        self.using_ssh = bool(self.sshkey or self.sshserver)

        if self.sshkey and not self.sshserver:
            # We are using ssh directly to the controller, tunneling localhost to localhost
            self.sshserver = self.registration_url.split('://')[1].split(
                ':')[0]

        if self.using_ssh:
            if self.tunnel_mod.try_passwordless_ssh(self.sshserver,
                                                    self.sshkey,
                                                    self.paramiko):
                password = False
            else:
                password = getpass("SSH Password for %s: " % self.sshserver)
        else:
            password = False

        def connect(s, url, curve_serverkey=None):
            url = disambiguate_url(url, self.location)
            if curve_serverkey is None:
                curve_serverkey = self.curve_serverkey
            if curve_serverkey:
                s.setsockopt(zmq.CURVE_SERVERKEY, curve_serverkey)
                s.setsockopt(zmq.CURVE_SECRETKEY, self.curve_secretkey)
                s.setsockopt(zmq.CURVE_PUBLICKEY, self.curve_publickey)

            if self.using_ssh:
                self.log.debug("Tunneling connection to %s via %s", url,
                               self.sshserver)
                return self.tunnel_mod.tunnel_connection(
                    s,
                    url,
                    self.sshserver,
                    keyfile=self.sshkey,
                    paramiko=self.paramiko,
                    password=password,
                )
            else:
                return s.connect(url)

        def maybe_tunnel(url):
            """like connect, but don't complete the connection (for use by heartbeat)"""
            url = disambiguate_url(url, self.location)
            if self.using_ssh:
                self.log.debug("Tunneling connection to %s via %s", url,
                               self.sshserver)
                url, tunnelobj = self.tunnel_mod.open_tunnel(
                    url,
                    self.sshserver,
                    keyfile=self.sshkey,
                    paramiko=self.paramiko,
                    password=password,
                )
            return str(url)

        return connect, maybe_tunnel

    def register(self):
        """send the registration_request"""
        if self.use_mpi and self.id and self.id >= 100 and self.mpi_registration_delay:
            # Some launchers implement the delay at the Launcher level,
            # but with mpiexec the engine process must implement it itself,
            # delay based on our rank

            delay = self.id * self.mpi_registration_delay
            self.log.info(
                f"Delaying registration for {self.id} by {int(delay * 1000)}ms"
            )
            time.sleep(delay)

        self.log.info("Registering with controller at %s" %
                      self.registration_url)
        ctx = self.context
        connect, maybe_tunnel = self.init_connector()
        reg = ctx.socket(zmq.DEALER)
        reg.setsockopt(zmq.IDENTITY, self.bident)
        connect(reg, self.registration_url)

        self.registrar = zmqstream.ZMQStream(reg, self.loop)

        content = dict(uuid=self.ident)
        if self.id is not None:
            self.log.info("Requesting id: %i", self.id)
            content['id'] = self.id
        self._registration_completed = False
        self.registrar.on_recv(
            lambda msg: self.complete_registration(msg, connect, maybe_tunnel))

        self.session.send(self.registrar,
                          "registration_request",
                          content=content)

    def _report_ping(self, msg):
        """Callback for when the heartmonitor.Heart receives a ping"""
        # self.log.debug("Received a ping: %s", msg)
        self._hb_last_pinged = time.time()

    def complete_registration(self, msg, connect, maybe_tunnel):
        try:
            self._complete_registration(msg, connect, maybe_tunnel)
        except Exception as e:
            self.log.critical(f"Error completing registration: {e}",
                              exc_info=True)
            self.exit(255)

    def _complete_registration(self, msg, connect, maybe_tunnel):
        ctx = self.context
        loop = self.loop
        identity = self.bident
        idents, msg = self.session.feed_identities(msg)
        msg = self.session.deserialize(msg)
        content = msg['content']
        info = self.connection_info

        def url(key):
            """get zmq url for given channel"""
            return str(info["interface"] + ":%i" % info[key])

        def urls(key):
            return [f'{info["interface"]}:{port}' for port in info[key]]

        if content['status'] == 'ok':
            requested_id = self.id
            self.id = content['id']
            if requested_id is not None and self.id != requested_id:
                self.log.warning(
                    f"Did not get the requested id: {self.id} != {requested_id}"
                )
                self.log.name = self.log.name.rsplit(".", 1)[0] + f".{self.id}"
            elif requested_id is None:
                # no specific id was requested; append the assigned id to the log name
                self.log.name += f".{self.id}"

            # create Shell Connections (MUX, Task, etc.):

            # select which broadcast endpoint to connect to
            # use rank % len(broadcast_leaves)
            broadcast_urls = urls('broadcast')
            broadcast_leaves = len(broadcast_urls)
            broadcast_index = self.id % len(broadcast_urls)
            broadcast_url = broadcast_urls[broadcast_index]

            shell_addrs = [url('mux'), url('task'), broadcast_url]
            self.log.info(f'Shell_addrs: {shell_addrs}')

            # Use only one shell stream for mux and tasks
            stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
            stream.setsockopt(zmq.IDENTITY, identity)
            # TODO: enable PROBE_ROUTER when schedulers can handle the empty message
            # stream.setsockopt(zmq.PROBE_ROUTER, 1)
            self.log.debug("Setting shell identity %r", identity)

            shell_streams = [stream]
            for addr in shell_addrs:
                self.log.info("Connecting shell to %s", addr)
                connect(stream, addr)

            # control stream:
            control_url = url('control')
            curve_serverkey = self.curve_serverkey
            if self.enable_nanny:
                nanny_url, self.nanny_pipe = self.start_nanny(
                    control_url=control_url, )
                control_url = nanny_url
                # nanny uses our curve_publickey, not the controller's publickey
                curve_serverkey = self.curve_publickey
            control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
            control_stream.setsockopt(zmq.IDENTITY, identity)
            connect(control_stream,
                    control_url,
                    curve_serverkey=curve_serverkey)

            # create iopub stream:
            iopub_addr = url('iopub')
            iopub_socket = ctx.socket(zmq.PUB)
            iopub_socket.SNDHWM = 0
            iopub_socket.setsockopt(zmq.IDENTITY, identity)
            connect(iopub_socket, iopub_addr)
            try:
                from ipykernel.iostream import IOPubThread
            except ImportError:
                pass
            else:
                iopub_socket = IOPubThread(iopub_socket)
                iopub_socket.start()

            # disable history:
            self.config.HistoryManager.hist_file = ':memory:'

            # Redirect input streams and set a display hook.
            if self.out_stream_factory:
                sys.stdout = self.out_stream_factory(self.session,
                                                     iopub_socket, 'stdout')
                sys.stdout.topic = f"engine.{self.id}.stdout".encode("ascii")
                sys.stderr = self.out_stream_factory(self.session,
                                                     iopub_socket, 'stderr')
                sys.stderr.topic = f"engine.{self.id}.stderr".encode("ascii")

                # copied from ipykernel 6, which captures sys.__stderr__ at the FD-level
                if getattr(sys.stderr, "_original_stdstream_copy",
                           None) is not None:
                    for handler in self.log.handlers:
                        if isinstance(handler, StreamHandler) and (
                                handler.stream.buffer.fileno() == 2):
                            self.log.debug(
                                "Seeing logger to stderr, rerouting to raw filedescriptor."
                            )

                            handler.stream = TextIOWrapper(
                                FileIO(sys.stderr._original_stdstream_copy,
                                       "w"))
            if self.display_hook_factory:
                sys.displayhook = self.display_hook_factory(
                    self.session, iopub_socket)
                sys.displayhook.topic = f"engine.{self.id}.execute_result".encode(
                    "ascii")

            # patch Session to always send engine uuid metadata
            original_send = self.session.send

            def send_with_metadata(
                stream,
                msg_or_type,
                content=None,
                parent=None,
                ident=None,
                buffers=None,
                track=False,
                header=None,
                metadata=None,
                **kwargs,
            ):
                """Ensure all messages set engine uuid metadata"""
                metadata = metadata or {}
                metadata.setdefault("engine", self.ident)
                return original_send(
                    stream,
                    msg_or_type,
                    content=content,
                    parent=parent,
                    ident=ident,
                    buffers=buffers,
                    track=track,
                    header=header,
                    metadata=metadata,
                    **kwargs,
                )

            self.session.send = send_with_metadata

            self.kernel = Kernel.instance(
                parent=self,
                engine_id=self.id,
                ident=self.ident,
                session=self.session,
                control_stream=control_stream,
                shell_streams=shell_streams,
                iopub_socket=iopub_socket,
                loop=loop,
                user_ns=self.user_ns,
                log=self.log,
            )

            self.kernel.shell.display_pub.topic = f"engine.{self.id}.displaypub".encode(
                "ascii")

            # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
            self.init_signal()
            app = IPKernelApp(parent=self,
                              shell=self.kernel.shell,
                              kernel=self.kernel,
                              log=self.log)
            if self.use_mpi and self.init_mpi:
                app.exec_lines.insert(0, self.init_mpi)
            app.init_profile_dir()
            app.init_code()

            self.kernel.start()
        else:
            self.log.fatal("Registration Failed: %s" % msg)
            raise Exception("Registration Failed: %s" % msg)

        self.start_heartbeat(
            maybe_tunnel(url('hb_ping')),
            maybe_tunnel(url('hb_pong')),
            content['hb_period'],
            identity,
        )
        self.log.info("Completed registration with id %i" % self.id)
        self.loop.remove_timeout(self._abort_timeout)

    def start_nanny(self, control_url):
        self.log.info("Starting nanny")
        config = Config()
        config.Session = self.config.Session
        return start_nanny(
            engine_id=self.id,
            identity=self.bident,
            control_url=control_url,
            curve_serverkey=self.curve_serverkey,
            curve_secretkey=self.curve_secretkey,
            curve_publickey=self.curve_publickey,
            registration_url=self.registration_url,
            config=config,
        )

    def start_heartbeat(self, hb_ping, hb_pong, hb_period, identity):
        """Start our heart beating"""

        hb_monitor = None
        if self.max_heartbeat_misses > 0:
            # Add a monitor socket which will record the last time a ping was seen
            mon = self.context.socket(zmq.SUB)
            if self.curve_serverkey:
                mon.setsockopt(zmq.CURVE_SERVER, 1)
                mon.setsockopt(zmq.CURVE_SECRETKEY, self.curve_secretkey)
            mport = mon.bind_to_random_port('tcp://%s' % localhost())
            mon.setsockopt(zmq.SUBSCRIBE, b"")
            self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
            self._hb_listener.on_recv(self._report_ping)

            hb_monitor = "tcp://%s:%i" % (localhost(), mport)

        heart = Heart(
            hb_ping,
            hb_pong,
            hb_monitor,
            heart_id=identity,
            curve_serverkey=self.curve_serverkey,
            curve_secretkey=self.curve_secretkey,
            curve_publickey=self.curve_publickey,
        )
        heart.start()

        # periodically check the heartbeat pings of the controller
        # Should be started here and not in "start()" so that the right period can be taken
        # from the hubs HeartBeatMonitor.period
        if self.max_heartbeat_misses > 0:
            # Use a slightly longer check period than the hub's signal period to avoid spurious warnings
            self.hb_check_period = hb_period + 500
            self.log.info(
                "Starting to monitor the heartbeat signal from the hub every %i ms.",
                self.hb_check_period,
            )
            self._hb_reporter = ioloop.PeriodicCallback(
                self._hb_monitor, self.hb_check_period)
            self._hb_reporter.start()
        else:
            self.log.info(
                "Monitoring of the heartbeat signal from the hub is not enabled."
            )

    def abort(self):
        self.log.fatal("Registration timed out after %.1f seconds" %
                       self.timeout)
        if "127." in self.registration_url:
            self.log.fatal("""
            If the controller and engines are not on the same machine,
            you will have to instruct the controller to listen on an external IP (in ipcontroller_config.py):
                c.IPController.ip = '0.0.0.0' # for all interfaces, internal and external
                c.IPController.ip = '192.168.1.101' # or any interface that the engines can see
            or tunnel connections via ssh.
            """)
        self.session.send(self.registrar,
                          "unregistration_request",
                          content=dict(id=self.id))
        time.sleep(1)
        sys.exit(255)

    def _hb_monitor(self):
        """Callback to monitor the heartbeat from the controller"""
        self._hb_listener.flush()
        if self._hb_last_monitored > self._hb_last_pinged:
            self._hb_missed_beats += 1
            self.log.warning(
                "No heartbeat in the last %s ms (%s time(s) in a row).",
                self.hb_check_period,
                self._hb_missed_beats,
            )
        else:
            # self.log.debug("Heartbeat received (after missing %s beats).", self._hb_missed_beats)
            self._hb_missed_beats = 0

        if self._hb_missed_beats >= self.max_heartbeat_misses:
            self.log.fatal(
                "Maximum number of heartbeats misses reached (%s times %s ms), shutting down.",
                self.max_heartbeat_misses,
                self.hb_check_period,
            )
            self.session.send(self.registrar,
                              "unregistration_request",
                              content=dict(id=self.id))
            self.loop.stop()

        self._hb_last_monitored = time.time()

    def init_engine(self):
        # This is the working dir by now.
        sys.path.insert(0, '')
        config = self.config

        if not self.connection_info_env:
            self.find_connection_file()
            if self.wait_for_url_file and not os.path.exists(self.url_file):
                self.log.warning(
                    f"Connection file {self.url_file!r} not found")
                self.log.warning(
                    "Waiting up to %.1f seconds for it to arrive.",
                    self.wait_for_url_file,
                )
                tic = time.monotonic()
                while not os.path.exists(self.url_file) and (
                        time.monotonic() - tic < self.wait_for_url_file):
                    # wait for url_file to exist, or until time limit
                    time.sleep(0.1)

            if not os.path.exists(self.url_file):
                self.log.fatal(
                    f"Fatal: connection file never arrived: {self.url_file}")
                self.exit(1)

        self.load_connection_file()

        exec_lines = []
        for app in ('IPKernelApp', 'InteractiveShellApp'):
            if '%s.exec_lines' % app in config:
                exec_lines = config[app].exec_lines
                break

        exec_files = []
        for app in ('IPKernelApp', 'InteractiveShellApp'):
            if '%s.exec_files' % app in config:
                exec_files = config[app].exec_files
                break

        config.IPKernelApp.exec_lines = exec_lines
        config.IPKernelApp.exec_files = exec_files

        if self.startup_script:
            exec_files.append(self.startup_script)
        if self.startup_command:
            exec_lines.append(self.startup_command)

    def forward_logging(self):
        if self.log_url:
            self.log.info("Forwarding logging to %s", self.log_url)
            context = self.context
            lsock = context.socket(zmq.PUB)
            lsock.connect(self.log_url)
            handler = EnginePUBHandler(self.engine, lsock)
            handler.setLevel(self.log_level)
            self.log.addHandler(handler)

    @catch_config_error
    def initialize(self, argv=None):
        super().initialize(argv)
        self.init_engine()
        self.forward_logging()

    def init_signal(self):
        signal.signal(signal.SIGINT, self._signal_sigint)
        signal.signal(signal.SIGTERM, self._signal_stop)

    def _signal_sigint(self, sig, frame):
        self.log.warning("Ignoring SIGINT. Terminate with SIGTERM.")

    def _signal_stop(self, sig, frame):
        self.log.critical(f"received signal {sig}, stopping")
        self.loop.add_callback_from_signal(self.loop.stop)

    def start(self):
        if self.id is not None:
            self.log.name += f".{self.id}"
        loop = self.loop

        def _start():
            self.register()
            self._abort_timeout = loop.add_timeout(loop.time() + self.timeout,
                                                   self.abort)

        self.loop.add_callback(_start)
        try:
            self.loop.start()
        except KeyboardInterrupt:
            self.log.critical("Engine Interrupted, shutting down...\n")
Example #17
class AcquireImagesProcess(GlobalConfiguration):
    use_simulated_camera = Bool(False).tag(config=True)
    camera_housekeeping_subdir = Unicode('camera').tag(config=True)
    acquire_counters_name = Unicode('acquire_images').tag(config=True)
    # traitlets Bytes traits need bytes defaults under Python 3
    camera_ip_address = Bytes(b"10.0.0.2").tag(config=True)
    trigger_interval = Int(default_value=2, min=1).tag(config=True)
    required_camera_configuration = List(trait=Tuple(Bytes(), Bytes()),
                                         default_value=[(b"PtpMode", b"Slave"),
                                                        (b"ChunkModeActive", b"1"),
                                                        (b"AcquisitionFrameCount", b"1"),
                                                        (b"AcquisitionMode", b"MultiFrame"),
                                                        (b"StreamFrameRateConstrain", b"0"),
                                                        (b"TriggerSource", b"FixedRate")]).tag(config=True)
    initial_camera_configuration = List(trait=Tuple(Bytes(), Bytes()),
                                        default_value=[(b"AcquisitionFrameCount", b"1"),
                                                       (b"AcquisitionFrameRateAbs", b"1.3"),
                                                       (b"ExposureTimeAbs", b"100000"),
                                                       (b"EFLensFocusCurrent", b"2050")]).tag(config=True)

    def __init__(self, raw_image_buffers, acquire_image_output_queue, acquire_image_input_queue,
                 command_queue, command_result_queue, info_buffer, status, uri, **kwargs):
        super(AcquireImagesProcess,self).__init__(**kwargs)
        self.data_buffers = raw_image_buffers
        self.input_queue = acquire_image_input_queue
        self.output_queue = acquire_image_output_queue
        self.command_queue = command_queue
        self.command_result_queue = command_result_queue
        self.info_buffer = info_buffer
        self.uri = uri
        self.camera_housekeeping_dir = os.path.join(self.housekeeping_dir,self.camera_housekeeping_subdir)
        self.status = status
        self.status.value = "starting"
        self.status_log_filename = None
        self.status_log_file = None
        self.status_log_last_update = 0
        self.status_log_update_interval = 10
        self.columns = camera_status_columns
        self.child = mp.Process(target=self.run)
        # self.child.start()

    def create_log_file(self, columns):
        try:
            os.makedirs(self.camera_housekeeping_dir)
        except OSError as e:
            if e.errno == errno.EEXIST and os.path.isdir(self.camera_housekeeping_dir):
                pass
            else:
                logger.exception("Could not create housekeeping directory %s" % self.camera_housekeeping_dir)

        self.status_log_filename = os.path.join(self.camera_housekeeping_dir, (time.strftime('%Y-%m-%d_%H%M%S.csv')))
        self.status_log_file = open(self.status_log_filename, 'a')
        self.status_log_file.write('# %s %s %s %s\n' %
                                   (self.pc.get_parameter("DeviceModelName"),
                                    self.pc.get_parameter("DeviceID"),
                                    self.pc.get_parameter("DeviceFirmwareVersion"),
                                    self.pc.get_parameter("GevDeviceMACAddress")))
        self.status_log_file.write(','.join(['epoch'] + columns) + '\n')

    def get_temperatures(self):
        self.pc.set_parameter("DeviceTemperatureSelector", "Main")
        main = self.pc.get_parameter("DeviceTemperature")
        self.pc.set_parameter("DeviceTemperatureSelector", "Sensor")
        sensor = self.pc.get_parameter("DeviceTemperature")
        return dict(main_temperature=main, sensor_temperature=sensor)

    def log_status(self, status_update):
        if time.time() - self.status_log_last_update < self.status_log_update_interval:
            return
        self.status_log_last_update = time.time()
        status_update = status_update.copy()
        camera_status = status_update.pop('all_camera_parameters')
        status_update.update(camera_status)
        if self.status_log_file is None:
            self.create_log_file(self.columns)
        values = [status_update['camera_status_update_at']]
        for column in self.columns:
            values.append(status_update[column])
        self.status_log_file.write(','.join(['%s' % value for value in values]) + '\n')
        self.status_log_file.flush()

    def run(self):
        self.pipeline = Pyro4.Proxy(uri=self.uri)
        self.pipeline._pyroTimeout = 0
        self.counters = CounterCollection(self.acquire_counters_name, self.counters_dir)
        self.counters.camera_armed.reset()
        self.counters.buffer_queued.reset()
        self.counters.error_queuing_buffer.reset()
        self.counters.command_sent.reset()
        self.counters.parameter_set.reset()
        self.counters.command_non_zero_result.reset()
        self.counters.waiting_for_buffer.reset()
        self.counters.waiting_for_buffer.lazy = True # This gets incremented several times per second, so no need to record every event
        self.counters.buffer_filled.reset()
        self.counters.getting_parameters.reset()
        self.counters.waiting.reset()
        self.counters.waiting.lazy = True  # waiting gets incremented hundreds of times per second, no need to record every increment

        # Setup
        frame_number = 0
        from pmc_turbo import camera
        self.status.value = "initializing camera"
        self.pc = camera.PyCamera(self.camera_ip_address, use_simulated_camera=self.use_simulated_camera)
        for name,value in self.required_camera_configuration:
            self.pc.set_parameter(name,value)
        for name,value in self.initial_camera_configuration:
            self.pc.set_parameter(name,value)

        self.payload_size = int(self.pc.get_parameter('PayloadSize'))
        logger.info("payload size: %d" % self.payload_size)

        self.pc._pc.start_capture()

        camera_parameters_last_updated = 0

        last_trigger = int(time.time() + 1)
        buffers_on_camera = set()
        self.acquisition_start_time = time.time()
        # Run loop
        exit_request = False
        self.status.value = "idle"

        while True:
            while True:
                try:
                    ready_to_queue = self.input_queue.get_nowait()
                except EmptyException:
                    break
                if ready_to_queue is None:
                    exit_request = True
                    break
                self.status.value = "queueing buffer %d" % ready_to_queue
                image_buffer = np.frombuffer(self.data_buffers[ready_to_queue].get_obj(), dtype='uint8')
                # cast the buffer array using the compound data type that has a spot for each info field
                npy_info_buffer = np.frombuffer(self.info_buffer[ready_to_queue].get_obj(), dtype=frame_info_dtype)
                result = self.pc._pc.queue_buffer(image_buffer, npy_info_buffer)
                if result != 0:
                    logger.error("Errorcode while queueing buffer: %r" % result)
                    self.counters.error_queuing_buffer.increment()
                else:
                    buffers_on_camera.add(ready_to_queue)
                    self.counters.buffer_queued.increment()
            if exit_request:
                break
            if time.time() > last_trigger + (self.trigger_interval-0.5):
                gate_time = int(time.time() + 1) # the amount of time since last trigger is already almost the trigger interval, so always advance to next second here.
                if not self.command_queue.empty():
                    name, value, tag = self.command_queue.get()
                    self.status.value = "sending command"
                    if name == 'trigger_interval':
                        try:
                            self.trigger_interval = int(value)
                            result = 0
                        except Exception:
                            logger.exception("Failed to set trigger_interval to %r" % value)
                            result = -1
                    else:
                        if value is None:
                            result = self.pc.run_feature_command(name)
                            self.counters.command_sent.increment()
                        else:
                            result = self.pc.set_parameter(name, value)
                            self.counters.parameter_set.increment()
                    if result:
                        logger.error("Errorcode %r while executing command %s:%r" % (result, name, value))
                        self.counters.command_non_zero_result.increment()
                    gate_time = int(time.time() + 1)  # update gate time in case some time has elapsed while executing
                    # command
                    self.command_result_queue.put((tag, name, value, result, gate_time))
                self.status.value = "arming camera"
                self.pc.set_parameter('PtpAcquisitionGateTime', str(int(gate_time * 1e9)))
                time.sleep(0.1)
                self.pc.run_feature_command("AcquisitionStart")
                last_trigger = gate_time
                self.counters.camera_armed.increment()

            if not buffers_on_camera:
                self.status.value = "waiting for buffer on camera"
                time.sleep(0.001)
                self.counters.waiting_for_buffer.increment()
            else:
                self.status.value = "checking buffers"
            num_buffers_filled = 0
            for buffer_id in list(buffers_on_camera):
                npy_info_buffer = np.frombuffer(self.info_buffer[buffer_id].get_obj(), dtype=frame_info_dtype)
                if npy_info_buffer[0]['is_filled']:
                    self.status.value = 'buffer %d was filled by camera' % buffer_id
                    logger.debug(self.status.value)
                    self.output_queue.put(buffer_id)
                    buffers_on_camera.remove(buffer_id)
                    frame_number += 1
                    num_buffers_filled += 1
                    self.counters.buffer_filled.increment()
            if num_buffers_filled == 0:
                self.status.value = "waiting for buffer to be filled"
                update_at = time.time()
                if update_at - camera_parameters_last_updated > 1.0:
                    self.status.value = "getting camera parameters"
                    status = self.pc.get_all_parameters()
                    temperatures = self.get_temperatures()
                    camera_parameters_last_updated = update_at

                    self.status.value = "updating status"
                    timestamp_comparison = self.pc.compare_timestamps() * 1e6
                    status_update = dict(all_camera_parameters=status,
                                         camera_status_update_at=update_at,
                                         camera_timestamp_offset=timestamp_comparison,
                                         total_frames=frame_number,
                                         trigger_interval=self.trigger_interval,
                                         )
                    status_update.update(temperatures)
                    self.log_status(status_update)
                    self.pipeline.update_status(status_update)
                    self.counters.getting_parameters.increment()
                else:
                    time.sleep(0.001)
                    self.status.value = "waiting"
                    self.counters.waiting.increment()
        # if we get here, we were kindly asked to exit
        self.status.value = "exiting"
        if self.use_simulated_camera:
            self.pc._pc.quit()
        return None
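
Note that the run loop never copies pixel data between processes: frames live in shared `multiprocessing` arrays, and only integer buffer indices travel through the input and output queues. A stripped-down sketch of that handshake, with illustrative buffer count and payload size:

import multiprocessing as mp
import numpy as np

NUM_BUFFERS, PAYLOAD = 4, 1024          # illustrative sizes

data_buffers = [mp.Array('B', PAYLOAD) for _ in range(NUM_BUFFERS)]
free_queue, filled_queue = mp.Queue(), mp.Queue()
for idx in range(NUM_BUFFERS):
    free_queue.put(idx)                 # every buffer starts out available

def fill_one_frame():
    idx = free_queue.get()              # claim a free buffer by index
    frame = np.frombuffer(data_buffers[idx].get_obj(), dtype='uint8')
    frame[:] = 42                       # write pixels in place; nothing is copied
    filled_queue.put(idx)               # hand only the index to the consumer
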
Example #18
class Mesh(DrawableWithCallback):
    """
    A 3D triangles mesh.

    Attributes:
        vertices: `array_like`.
            Array of triangle vertices: float (x, y, z) coordinate triplets.
        indices: `array_like`.
            Array of vertex indices: int triplets of indices from vertices array.
        color: `int`.
            Packed RGB color of the mesh (0xff0000 is red, 0xff is blue) when not using color maps.
        colors: `array_like`.
            Same-length array of (`int`) packed RGB color of the points (0xff0000 is red, 0xff is blue).
        attribute: `array_like`.
            Array of float attribute for the color mapping, corresponding to each vertex.
        color_map: `list`.
            A list of float quadruplets (attribute value, R, G, B), sorted by attribute value. The first
            quadruplet should have value 0.0, the last 1.0; R, G, B are RGB color components in the range 0.0 to 1.0.
        color_range: `list`.
            A pair [min_value, max_value], which determines the levels of color attribute mapped
            to 0 and 1 in the color map respectively.
        wireframe: `bool`.
            Whether mesh should display as wireframe.
        flat_shading: `bool`.
            Whether mesh should display with flat shading.
        opacity: `float`.
            Opacity of mesh.
        volume: `array_like`.
            3D array of `float`.
        volume_bounds: `array_like`.
            6-element tuple specifying the bounds of the volume data (x0, x1, y0, y1, z0, z1)
        texture: `bytes`.
            Image data in a specific format.
        texture_file_format: `str`.
            Format of the data, it should be the second part of MIME format of type 'image/',
            for example 'jpeg', 'png', 'gif', 'tiff'.
        uvs: `array_like`.
            Array of float uvs for the texturing, corresponding to each vertex.
        model_matrix: `array_like`.
            4x4 model transform matrix.
    """

    type = Unicode(read_only=True).tag(sync=True)
    vertices = TimeSeries(Array(dtype=np.float32)).tag(
        sync=True, **array_serialization_wrap('vertices'))
    indices = TimeSeries(Array(dtype=np.uint32)).tag(
        sync=True, **array_serialization_wrap('indices'))
    color = TimeSeries(Int(min=0, max=0xffffff)).tag(sync=True)
    colors = TimeSeries(Array(dtype=np.uint32)).tag(
        sync=True, **array_serialization_wrap('colors'))
    attribute = TimeSeries(Array(dtype=np.float32)).tag(
        sync=True, **array_serialization_wrap('attribute'))
    color_map = TimeSeries(Array(dtype=np.float32)).tag(
        sync=True, **array_serialization_wrap('color_map'))
    color_range = TimeSeries(ListOrArray(minlen=2, maxlen=2,
                                         empty_ok=True)).tag(sync=True)
    wireframe = TimeSeries(Bool()).tag(sync=True)
    flat_shading = TimeSeries(Bool()).tag(sync=True)
    side = TimeSeries(Unicode()).tag(sync=True)
    opacity = TimeSeries(Float(min=0.0, max=1.0,
                               default_value=1.0)).tag(sync=True)
    volume = TimeSeries(Array()).tag(sync=True,
                                     **array_serialization_wrap('volume'))
    volume_bounds = TimeSeries(Array(dtype=np.float32)).tag(
        sync=True, **array_serialization_wrap('volume_bounds'))
    texture = Bytes(allow_none=True).tag(sync=True)
    texture_file_format = Unicode(allow_none=True).tag(sync=True)
    uvs = TimeSeries(Array()).tag(sync=True, **array_serialization_wrap('uvs'))
    opacity_function = TimeSeries(Array(dtype=np.float32)).tag(
        sync=True, **array_serialization_wrap('opacity_function'))
    model_matrix = TimeSeries(Array(dtype=np.float32)).tag(
        sync=True, **array_serialization_wrap('model_matrix'))

    def __init__(self, **kwargs):
        super(Mesh, self).__init__(**kwargs)

        self.set_trait('type', 'Mesh')

    @validate('colors')
    def _validate_colors(self, proposal):
        if type(proposal['value']) is dict or type(self.vertices) is dict:
            return proposal['value']

        required = self.vertices.size // 3  # (x, y, z) triplet per 1 color
        actual = proposal['value'].size
        if actual != 0 and required != actual:
            raise TraitError('colors has wrong size: %s (%s required)' %
                             (actual, required))
        return proposal['value']

    @validate('volume')
    def _validate_volume(self, proposal):
        if type(proposal['value']) is dict:
            return proposal['value']

        if type(proposal['value']) is np.ndarray and \
                proposal['value'].dtype is np.dtype(object):
            return proposal['value'].tolist()

        if proposal['value'].shape == (0, ):
            return np.array(proposal['value'], dtype=np.float32)

        required = [np.float16, np.float32]
        actual = proposal['value'].dtype

        if actual not in required:
            warnings.warn('wrong dtype: %s (%s required)' % (actual, required))

            return proposal['value'].astype(np.float32)

        return proposal['value']

    def get_bounding_box(self):
        return get_bounding_box_points(self.vertices, self.model_matrix)
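
A minimal usage sketch, assuming `Mesh` is importable from its package: one triangle whose per-vertex colors are driven by `attribute` through a flat (value, R, G, B) `color_map`, as the docstring describes:

import numpy as np

# Hypothetical usage; assumes Mesh is importable from its package.
triangle = Mesh(
    vertices=np.array([0, 0, 0, 1, 0, 0, 0, 1, 0], dtype=np.float32),
    indices=np.array([0, 1, 2], dtype=np.uint32),      # one triangle
    attribute=np.array([0.0, 0.5, 1.0], dtype=np.float32),
    # flat (value, R, G, B) quadruplets: blue at 0.0, red at 1.0
    color_map=np.array([0.0, 0, 0, 1, 1.0, 1, 0, 0], dtype=np.float32),
    color_range=[0.0, 1.0],
    model_matrix=np.identity(4, dtype=np.float32),
)
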
Example #19
class _Media(DOMWidget, ValueWidget, CoreWidget):
    """Base class for Image, Audio and Video widgets.

    The `value` of this widget accepts a byte string.  The byte string is the
    raw data that you want the browser to display.

    If you pass `"url"` to the `"format"` trait, `value` will be interpreted
    as a URL; the bytes are assumed to be UTF-8 encoded.
    """

    # Define the custom state properties to sync with the front-end
    value = Bytes(help="The media data as a byte string.").tag(
        sync=True, **bytes_serialization)

    @classmethod
    def _from_file(cls, tag, filename, **kwargs):
        """
        Create a :class:`Media` from a local file.

        Parameters
        ----------
        filename: str
            The location of a file to read into the value from disk.

        **kwargs:
            The keyword arguments for `Media`

        Returns a `Media` with the value set from the filename.
        """
        value = cls._load_file_value(filename)

        if 'format' not in kwargs:
            format = cls._guess_format(tag, filename)
            if format is not None:
                kwargs['format'] = format

        return cls(value=value, **kwargs)

    @classmethod
    def from_url(cls, url, **kwargs):
        """
        Create a :class:`Media` from a URL.

        :code:`Media.from_url(url)` is equivalent to:

        .. code-block:: python

            med = Media(value=url, format='url')

        But both unicode and bytes arguments are allowed for ``url``.

        Parameters
        ----------
        url: [str, bytes]
            The location of a URL to load.
        """
        if isinstance(url, str):
            # If str, it needs to be encoded to bytes
            url = url.encode('utf-8')

        return cls(value=url, format='url')

    def set_value_from_file(self, filename):
        """
        Convenience method for reading a file into `value`.

        Parameters
        ----------
        filename: str
            The location of a file to read into value from disk.
        """
        value = self._load_file_value(filename)

        self.value = value

    @classmethod
    def _load_file_value(cls, filename):
        if getattr(filename, 'read', None) is not None:
            return filename.read()
        else:
            with open(filename, 'rb') as f:
                return f.read()

    @classmethod
    def _guess_format(cls, tag, filename):
        # file objects may have a .name attribute
        name = getattr(filename, 'name', None)
        name = name or filename

        try:
            mtype, _ = mimetypes.guess_type(name)
            if not mtype.startswith('{}/'.format(tag)):
                return None

            return mtype[len('{}/'.format(tag)):]
        except Exception:
            return None

    def _get_repr(self, cls):
        # Truncate the value in the repr, since it will
        # typically be very, very large.
        class_name = self.__class__.__name__

        # Return value first like a ValueWidget
        signature = []
        sig_value = repr(self.value)
        prefix, rest = sig_value.split("'", 1)
        content = rest[:-1]
        if len(content) > 100:
            sig_value = "{}'{}...'".format(prefix, content[0:100])
        signature.append('{}={}'.format('value', sig_value))

        for key in super(cls, self)._repr_keys():
            if key == 'value':
                continue
            value = str(getattr(self, key))
            signature.append('{}={!r}'.format(key, value))
        signature = ', '.join(signature)
        return '{}({})'.format(class_name, signature)
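
A concrete widget builds on this base by fixing the MIME tag passed to the helpers. A hypothetical subclass (the `Image` name mirrors ipywidgets but is shown here only as a sketch):

class Image(_Media):
    """Hypothetical concrete widget for 'image/*' media."""

    @classmethod
    def from_file(cls, filename, **kwargs):
        # tag='image' lets _guess_format turn 'photo.png' into format='png'
        return cls._from_file('image', filename, **kwargs)

# img = Image.from_file('photo.png')   # value=<raw bytes>, format='png'
# img = Image.from_url('https://example.com/photo.png')  # value=b'https://...'
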
Example #20
class PVDisplay(widgets.DOMWidget):
    """A ParaView interactive render widget"""
    _view_name = Unicode('PVDisplayView').tag(sync=True)
    _model_name = Unicode('PVDisplayModel').tag(sync=True)
    _view_module = Unicode('ipyparaview').tag(sync=True)
    _model_module = Unicode('ipyparaview').tag(sync=True)
    _view_module_version = Unicode('^0.1.2').tag(sync=True)
    _model_module_version = Unicode('^0.1.2').tag(sync=True)

    # traitlets -- variables synchronized with front end
    frame = Bytes().tag(sync=True)
    compressedFrame = Bytes().tag(sync=True)
    resolution = Tuple((800, 500)).tag(sync=True)  #canvas resolution; w,h
    fpsLimit = Float(60.0).tag(sync=True)  #maximum render rate
    maxEventRate = Float(20.0).tag(
        sync=True)  #maximum number of mouse events/s

    # class variables
    instances = dict()
    rotateScale = 5.0

    @classmethod
    def GetOrCreate(cls, ren, runAsync=True, **kwargs):
        """
        Check if a PVDisplay instance already exists for the renderer. If yes, return that instance; otherwise, create a new one.
        """
        instance = cls.instances.get(ren, None)
        if instance is None:
            instance = PVDisplay(ren, runAsync, **kwargs)
            cls.instances.update({ren: instance})
        return instance

    def __init__(self, ren, runAsync=True, compressFrames=False, **kwargs):
        # see if we can import Dask.distributed, then try guessing the render
        # mode based on the type of ren. Fallback to regular Jupyter rendering
        # otherwise
        try:
            import dask.distributed as distributed
            if (type(ren) == list and type(ren[0]) == distributed.actor.Actor):
                self.mode = 'Dask'
            else:
                self.mode = 'Jupyter'
        except ImportError:
            self.mode = 'Jupyter'

        if self.mode == 'Jupyter' and ren in PVDisplay.instances:
            raise RuntimeError(
                f"A PVDisplay instance already exists for this renderer. Use PVDisplay.GetOrCreate() to avoid this error."
            )

        super(PVDisplay, self).__init__(**kwargs)  #must call super class init

        # regular vars
        self.compressFrames = compressFrames
        self.pvs, self.renv, self.w2i = None, None, None  #used for Jupyter kernel rendering
        self.master, self.renderers = None, []  #used for Dask rendering
        self.tp = time.time()  #time of latest render
        self.fps = 10.0
        self.fpsOut = []  #FPS output ipywidgets; passed in from Jupyter
        self.intyld = [0.05, 0.01]  #interaction yield--period and duration
        self.tiy = time.time()  #time of last interaction yield

        if self.mode == 'Dask':
            self.renderers = ren
            self.master = [r for r in self.renderers if r.rank == 0][0]
            self.resolution = tuple(
                self.master.run(lambda self: list(self.renv.ViewSize),
                                []).result())
            cf = self.master.run(lambda self: list(self.renv.CameraFocalPoint),
                                 []).result()
            cp = self.master.run(lambda self: list(self.renv.CameraPosition),
                                 []).result()
            self.camf = (cf[0], cf[1], cf[2])
            self.camp = (cp[0], cp[1], cp[2])
        else:
            import paraview.simple as pvs
            self.pvs = pvs
            self.renv = ren
            self.resolution = tuple(self.renv.ViewSize)

            cf = self.renv.CameraFocalPoint
            cp = self.renv.CameraPosition
            self.camf = (cf[0], cf[1], cf[2])
            self.camp = (cp[0], cp[1], cp[2])

            import vtk
            from vtk import vtkWindowToImageFilter
            self.w2i = vtkWindowToImageFilter()
            self.w2i.ReadFrontBufferOff()
            self.w2i.ShouldRerenderOff()
            self.w2i.SetInput(self.renv.SMProxy.GetRenderWindow())

        self.frameNum = 0
        self.FRBufSz = 10
        self.FRBuf = np.zeros(self.FRBufSz, dtype=np.float32)

        self.runAsync = runAsync
        if runAsync:
            self.renderThread = threading.Thread(target=self.__renderLoop)
            self.renderThread.start()

    #FIXME: starting the render loop thread outside of __init__ seems to create
    # a copy of the paraview.simple object, rather than using the one that's
    # part of the PVDisplay state; this causes PV to crash
    #def setAsync(self, on):
    #    if on and not self.runAsync:
    #        self.runAsync = on
    #        self.renderThread = threading.Thread(target=self.__renderLoop)
    #        self.renderThread.start()
    #    elif not on and self.runAsync:
    #        self.runAsync = False

    def addFPSDisplay(self, *w):
        """Add a widget to write FPS to"""
        for o in w:
            self.fpsOut.append(o)

    def updateCam(self):
        self.render()

    def render(self):
        if self.runAsync:
            return
        else:
            tc = time.time()
            if (1.0 / (tc - self.tp) < self.fpsLimit):
                self.__renderFrame()

    def fetchFrame(self):
        if self.mode == 'Dask':
            return self.master.fetchFrame().result()
        else:
            # Mathias's magic frame fetching snippet
            self.w2i.Modified()
            self.w2i.Update()
            imagedata = self.w2i.GetOutput()
            w, h, _ = imagedata.GetDimensions()
            from vtk.util.numpy_support import vtk_to_numpy
            imagedata_np = vtk_to_numpy(
                imagedata.GetPointData().GetScalars()).reshape((h, w, 3))
            return np.flipud(
                np.pad(imagedata_np, ((0, 0), (0, 0), (0, 1)),
                       mode='constant',
                       constant_values=255))

    def _handle_custom_msg(self, content, buffers):
        self.content = content
        if content['event'] == 'updateCam':
            self.updateCam()

        if content['event'] == 'rotate':
            self.__rotateCam(content['data'])
        if content['event'] == 'pan':
            self.__panCam(content['data'])
        if content['event'] == 'zoom':
            self.__zoomCam(content['data'])

    def __rotateCam(self, mouseDelta):
        #rotates the camera around the focus in spherical coordinates
        phiLim = 1.5175
        if self.mode == 'Dask':
            from dask.distributed import wait
            wait([
                r.rotateCam(mouseDelta, self.rotateScale, phiLim)
                for r in self.renderers
            ])
        else:
            (self.renv.CameraPosition, self.renv.CameraFocalPoint,
             self.renv.CameraViewUp) = rotateCameraTurntable(
                 mouseDelta, self.renv.CameraPosition,
                 self.renv.CameraFocalPoint, self.renv.CameraViewUp,
                 self.rotateScale, phiLim)

        self.render()

    def __panCam(self, mouseDelta):
        #moves the camera with a 1:1 relation to current focal point
        if self.mode == 'Dask':
            from dask.distributed import wait
            wait([r.panCam(mouseDelta) for r in self.renderers])
        else:
            (self.renv.CameraPosition, self.renv.CameraFocalPoint,
             self.renv.CameraViewUp) = panCameraTurntable(
                 mouseDelta, self.renv.CameraPosition,
                 self.renv.CameraFocalPoint, self.renv.CameraViewUp,
                 self.renv.CameraViewAngle)

        self.render()

    def __zoomCam(self, mouseDelta):
        #zooms by scaling the distance between camera and focus
        rlim = 0.00001  #minimum allowable radius
        if self.mode == 'Dask':
            from dask.distributed import wait
            wait([r.zoomCam(mouseDelta, rlim) for r in self.renderers])
        else:
            (self.renv.CameraPosition, self.renv.CameraFocalPoint,
             self.renv.CameraViewUp) = zoomCameraTurntable(
                 mouseDelta, self.renv.CameraPosition,
                 self.renv.CameraFocalPoint, self.renv.CameraViewUp, rlim)

        self.render()

    def __compressFrame(self, frame):
        img = Image.fromarray(frame[:, :, :3])
        bytesIO = BytesIO()
        img.save(bytesIO, format='jpeg', quality=50)
        img_str = base64.b64encode(bytesIO.getvalue())
        return img_str

    def __renderFrame(self):
        tc = time.time()
        self.FRBuf[self.frameNum % self.FRBufSz] = 1.0 / (tc - self.tp)
        self.tp = tc

        #set the camera position, render, and get the output frame
        if self.mode == 'Dask':
            from dask.distributed import wait
            wait([r.render() for r in self.renderers])
        else:
            self.pvs.Render(view=self.renv)
        uncompressedFrameNp = self.fetchFrame()
        if self.compressFrames:
            self.compressedFrame = self.__compressFrame(uncompressedFrameNp)
        else:
            self.frame = uncompressedFrameNp.tobytes()  # tostring() is deprecated in NumPy
        self.frameNum += 1
        self.fps = np.average(self.FRBuf)
        if self.fpsOut is not None:
            for fo in self.fpsOut:
                fo.value = self.fps

    def __renderLoop(self):
        while self.runAsync:
            #check if it's time for an interaction yield; if so, do it
            if time.time() - self.tiy > self.intyld[0]:
                time.sleep(self.intyld[1])
                self.tiy = time.time()

            #sleep to keep FPS to fpsLimit
            time.sleep(max(0, 1.0 / self.fpsLimit - (time.time() - self.tp)))

            self.__renderFrame()
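
In a notebook the widget is typically created from an existing ParaView render view. A minimal sketch, assuming `paraview.simple` is available in the kernel:

import paraview.simple as pvs

# Build a trivial scene, then wrap its active render view in the widget.
sphere = pvs.Sphere()
pvs.Show(sphere)
renv = pvs.GetActiveView()

disp = PVDisplay.GetOrCreate(renv)  # reuses the widget if one already exists for renv
disp                                # displaying the widget starts streaming frames
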
Example #21
class PerspectiveBaseMixin(HasTraits):
    '''Perspective Base Mixin'''
    # Data (private)
    _data = Union(
        (List(default_value=[]), Dict(default_value={}))).tag(sync=True)
    _bin_data = Bytes().tag(sync=True)  # binary data
    _dat_orig = Any()

    # Data source
    datasrc = Unicode(default_value='').tag(sync=True)
    schema = Dict(default_value={}).tag(sync=True)

    # layout
    view = Unicode('hypergrid').tag(sync=True)
    columns = List(default_value=[]).tag(sync=True)
    rowpivots = List(trait=Unicode(), default_value=[]).tag(sync=True, o=True)
    columnpivots = List(trait=Unicode(), default_value=[]).tag(sync=True)
    aggregates = Dict(trait=Unicode(), default_value={}).tag(sync=True)
    sort = List(default_value=[]).tag(sync=True)
    index = Unicode(default_value='').tag(sync=True)
    limit = Int(default_value=-1).tag(sync=True)
    computedcolumns = List(trait=Dict, default_value=[]).tag(sync=True)
    filters = List(trait=List, default_value=[]).tag(sync=True)
    plugin_config = Dict(default_value={}).tag(sync=True)

    # show settings (currently broken)
    settings = Bool(True).tag(sync=True)

    # set perspective in embedded mode (work outside jlab)
    embed = Bool(False).tag(sync=True)

    # dark mode
    dark = Bool(False).tag(sync=True)

    # try to use apache arrow to transfer data
    transfer_as_arrow = Bool(True).tag(sync=True)

    def load(self, value):
        data_object = type_detect(value,
                                  schema=self.schema,
                                  columns=self.columns,
                                  transfer_as_arrow=self.transfer_as_arrow)
        self.datasrc = data_object.type
        if data_object.type in ('arrow',):  # note the tuple: ('arrow') is just a string
            self.schema = validate_schema(data_object.schema)
            self.columns = data_object.columns
            self._bin_data = data_object.data
            self._data = []
            return

        # len in case dataframe
        if len(data_object.data) and data_object.type:
            s = validate_schema(data_object.schema)
            self.schema = s

            computedcolumns = []
            if self.computedcolumns:
                for c in self.computedcolumns:
                    if c['name'] not in computedcolumns:
                        computedcolumns.append(c['name'])

            if not self.columns and 'columns' not in data_object.kwargs:
                columns = list(map(lambda x: str(x), data_object.columns))

                # reasonable default, pivot by default in non-grid view
                if not self.rowpivots and self.view != 'hypergrid':
                    if 'index' in columns:
                        self.rowpivots = ['index']
                        if 'index' in columns:
                            columns.remove('index')

                if self.computedcolumns:
                    for c in self.computedcolumns:
                        if c['name'] not in columns:
                            columns.append(c['name'])

                self.columns = columns + computedcolumns

            elif not self.columns and 'columns' in data_object.kwargs:
                columns = list(
                    map(lambda x: str(x), data_object.kwargs.pop('columns')))
                self.columns = columns + computedcolumns

        else:
            self.schema = {}

        for k, v in iteritems(data_object.kwargs):
            if not getattr(self, k):
                setattr(self, k, v)

        # set data last
        self._data = data_object.data

    @validate('datasrc')
    def _validate_datasrc(self, proposal):
        return proposal.value  # validated elsewhere

    @validate('schema')
    def _validate_schema(self, proposal):
        return proposal.value  # validated elsewhere

    @validate('view')
    def _validate_view(self, proposal):
        return validate_view(proposal.value)

    @validate('columns')
    def _validate_columns(self, proposal):
        return validate_columns(proposal.value)

    @validate('rowpivots')
    def _validate_rowpivots(self, proposal):
        return validate_rowpivots(proposal.value)

    @validate('columnpivots')
    def _validate_columnpivots(self, proposal):
        return validate_columnpivots(proposal.value)

    @validate('aggregates')
    def _validate_aggregates(self, proposal):
        return validate_aggregates(proposal.value)

    @validate('sort')
    def _validate_sort(self, proposal):
        return validate_sort(proposal.value)

    @validate('computedcolumns')
    def _validate_computedcolumns(self, proposal):
        return validate_computedcolumns(proposal.value, self.columns)

    @validate('filters')
    def _validate_filters(self, proposal):
        return validate_filters(proposal.value, self.columns)

    @validate('plugin_config')
    def _validate_plugin_config(self, proposal):
        return validate_plugin_config(proposal.value)

    def _as_json(self, data_only=False, allow_nan=False):
        if data_only:
            if self.datasrc in ('arrow', ):
                return getattr(self, '_bin_data')
            else:
                return json.dumps(getattr(self, '_data'), allow_nan=allow_nan)

        ret = {}
        if self.datasrc in ('arrow',):
            ret['data'] = 'ARROW'
        else:
            ret['data'] = getattr(self, '_data')
        ret['datasrc'] = getattr(self, 'datasrc')
        ret['schema'] = getattr(self, 'schema')
        ret['view'] = getattr(self, 'view')
        ret['columns'] = getattr(self, 'columns')
        ret['rowpivots'] = getattr(self, 'rowpivots')
        ret['columnpivots'] = getattr(self, 'columnpivots')
        ret['aggregates'] = getattr(self, 'aggregates')
        ret['sort'] = getattr(self, 'sort')
        ret['index'] = getattr(self, 'index')
        ret['limit'] = getattr(self, 'limit')
        ret['computedcolumns'] = getattr(self, 'computedcolumns')
        ret['filters'] = getattr(self, 'filters')
        ret['plugin_config'] = getattr(self, 'plugin_config')
        ret['settings'] = getattr(self, 'settings')
        ret['embed'] = getattr(self, 'embed')
        ret['dark'] = getattr(self, 'dark')
        return json.dumps(ret, allow_nan=allow_nan)

    def setup(self,
              data,
              view='hypergrid',
              schema=None,
              columns=None,
              rowpivots=None,
              columnpivots=None,
              aggregates=None,
              sort=None,
              index='',
              limit=-1,
              computedcolumns=None,
              filters=None,
              plugin_config=None,
              settings=True,
              embed=False,
              dark=False,
              transfer_as_arrow=False,
              *args,
              **kwargs):
        '''Setup perspective base class

        Arguments:
            data : dataframe/list/dict
                The static or live datasource

        Keyword Arguments:
            view : str or View
                what view to use. available in the enum View (default: {'hypergrid'})
            columns : list of str
                what columns to display
            rowpivots : list of str
                what names to use as rowpivots
            columnpivots : list of str
                what names to use as columnpivots
            aggregates:  dict(str: str or Aggregate)
                dictionary of name to aggregate type (either string or enum Aggregate)
            index : str
                columns to use as index
            limit : int
                row limit
            computedcolumns : list of dict
                computed columns to set on the perspective viewer
            filters: list of list
                list of filters to apply to columns
            plugin_config: dict
                configuration dictionary to pass to perspective plugin
            settings : bool
                display settings
            embed : bool
                embedded mode
            dark : bool
                use dark theme

        '''
        self.transfer_as_arrow = transfer_as_arrow
        self.view = validate_view(view)
        self.schema = schema or {}
        self.sort = validate_sort(sort) or []
        self.index = index
        self.limit = limit
        self.settings = settings
        self.embed = embed
        self.dark = dark

        self.rowpivots = validate_rowpivots(rowpivots) or []
        self.columnpivots = validate_columnpivots(columnpivots) or []
        self.aggregates = validate_aggregates(aggregates) or {}

        self.columns = validate_columns(columns) or []
        self.computedcolumns = validate_computedcolumns(computedcolumns) or []

        self.filters = validate_filters(filters) or []
        self.plugin_config = validate_plugin_config(plugin_config) or {}

        self.load(data)
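
A subclass would normally drive the mixin through `setup()`. A hypothetical sketch with a pandas DataFrame (the `PerspectiveWidget` class and the `'y_bar'` view name are assumptions for illustration, not taken from the code above):

import pandas as pd
from ipywidgets import DOMWidget

# Hypothetical widget class combining the mixin with an ipywidgets DOMWidget.
class PerspectiveWidget(PerspectiveBaseMixin, DOMWidget):
    pass

df = pd.DataFrame({'name': ['a', 'b', 'a'], 'value': [1, 2, 3]})
w = PerspectiveWidget()
w.setup(df, view='y_bar', rowpivots=['name'], aggregates={'value': 'sum'})
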
Example #22
class BinderHub(Application):
    """An Application for starting a builder."""
    @default('log_level')
    def _log_level(self):
        return logging.INFO

    aliases = {
        'log-level': 'Application.log_level',
        'f': 'BinderHub.config_file',
        'config': 'BinderHub.config_file',
        'port': 'BinderHub.port',
    }

    flags = {
        'debug': ({
            'BinderHub': {
                'debug': True
            }
        }, "Enable debug HTTP serving & debug logging")
    }

    config_file = Unicode('binderhub_config.py',
                          help="""
        Config file to load.

        If a relative path is provided, it is taken relative to current directory
        """,
                          config=True)

    google_analytics_code = Unicode(None,
                                    allow_none=True,
                                    help="""
        The Google Analytics code to use on the main page.

        Note that we'll respect Do Not Track settings, despite the fact that GA does not.
        We will not load the GA scripts on browsers with DNT enabled.
        """,
                                    config=True)

    google_analytics_domain = Unicode('auto',
                                      help="""
        The Google Analytics domain to use on the main page.

        By default this is set to 'auto', which sets it up for current domain and all
        subdomains. This can be set to a more restrictive domain here for better privacy
        """,
                                      config=True)

    about_message = Unicode('',
                            help="""
        Additional message to display on the about page.

        Will be directly inserted into the about page's source so you can use
        raw HTML.
        """,
                            config=True)

    banner_message = Unicode('',
                             help="""
        Message to display in a banner on all pages.

        The value will be inserted "as is" into an HTML <div> element
        with grey background, located at the top of the BinderHub pages. Raw
        HTML is supported.
        """,
                             config=True)

    extra_footer_scripts = Dict({},
                                help="""
        Extra bits of JavaScript that should be loaded in footer of each page.

        Only the values are set up as scripts. Keys are used only
        for sorting.

        Omit the <script> tag. This should be primarily used for
        analytics code.
        """,
                                config=True)

    base_url = Unicode('/',
                       help="The base URL of the entire application",
                       config=True)

    @validate('base_url')
    def _valid_base_url(self, proposal):
        if not proposal.value.startswith('/'):
            proposal.value = '/' + proposal.value
        if not proposal.value.endswith('/'):
            proposal.value = proposal.value + '/'
        return proposal.value

    badge_base_url = Union(trait_types=[Unicode(), Callable()],
                           help="""
        Base URL to use when generating launch badges.
        Can also be a function that is passed the current handler and returns
        the badge base URL, or "" for the default.

        For example, you could get the badge_base_url from a custom HTTP
        header, the Referer header, or from a request parameter
        """,
                           config=True)

    @default('badge_base_url')
    def _badge_base_url_default(self):
        return ''

    @validate('badge_base_url')
    def _valid_badge_base_url(self, proposal):
        if callable(proposal.value):
            return proposal.value
        # add a trailing slash only when a value is set
        if proposal.value and not proposal.value.endswith('/'):
            proposal.value = proposal.value + '/'
        return proposal.value

    cors_allow_origin = Unicode("",
                                help="""
        Origins that can access the BinderHub API.

        Sets the Access-Control-Allow-Origin header in the spawned
        notebooks. Set to '*' to allow any origin to access spawned
        notebook servers.

        See also BinderSpawner.cors_allow_origin in the binderhub spawner
        mixin for setting this property on the spawned notebooks.
        """,
                                config=True)

    auth_enabled = Bool(False,
                        help="""If JupyterHub authentication enabled,
        require user to login (don't create temporary users during launch) and
        start the new server for the logged in user.""",
                        config=True)

    port = Integer(8585,
                   help="""
        Port for the builder to listen on.
        """,
                   config=True)

    appendix = Unicode(
        help="""
        Appendix to pass to repo2docker

        A multi-line string of Docker directives to run.
        Since the build context cannot be affected,
        ADD will typically not be useful.

        This should be a Python string template.
        It will be formatted with at least the following names available:

        - binder_url: the shareable URL for the current image
          (e.g. for sharing links to the current Binder)
        - repo_url: the repository URL used to build the image
        """,
        config=True,
    )

    sticky_builds = Bool(
        False,
        help="""
        Attempt to assign builds for the same repository to the same node.

        In order to speed up re-builds of a repository all its builds will
        be assigned to the same node in the cluster.

        Note: This feature only works if you also enable docker-in-docker support.
        """,
        config=True,
    )

    use_registry = Bool(True,
                        help="""
        Set to true to push images to a registry & check for images in registry.

        Set to false to use only local docker images. Useful when running
        in a single node.
        """,
                        config=True)

    build_class = Type(Build,
                       help="""
        The class used to build repo2docker images.

        Must inherit from binderhub.build.Build
        """,
                       config=True)

    registry_class = Type(DockerRegistry,
                          help="""
        The class used to Query a Docker registry.

        Must inherit from binderhub.registry.DockerRegistry
        """,
                          config=True)

    per_repo_quota = Integer(
        0,
        help="""
        Maximum number of concurrent users running from a given repo.

        Limits the amount of Binder that can be consumed by a single repo.

        0 (default) means no quotas.
        """,
        config=True,
    )

    pod_quota = Integer(
        None,
        help="""
        The number of concurrent pods this hub has been designed to support.

        This quota is used as an indication for how much above or below the
        design capacity a hub is running.

        Attempts to launch new pods once the quota has been reached will fail.

        The default corresponds to no quota, 0 means the hub can't accept pods
        (maybe because it is in maintenance mode), and any positive integer
        sets the quota.
        """,
        allow_none=True,
        config=True,
    )

    per_repo_quota_higher = Integer(
        0,
        help="""
        Maximum number of concurrent users running from a higher-quota repo.

        Limits the amount of Binder that can be consumed by a single repo. This
        quota is a second limit for repos with special status. See the
        `high_quota_specs` parameter of RepoProvider classes for usage.

        0 (default) means no quotas.
        """,
        config=True,
    )

    log_tail_lines = Integer(
        100,
        help="""
        Limit number of log lines to show when connecting to an already running build.
        """,
        config=True,
    )

    push_secret = Unicode('binder-build-docker-config',
                          allow_none=True,
                          help="""
        A kubernetes secret object that provides credentials for pushing built images.
        """,
                          config=True)

    image_prefix = Unicode("",
                           help="""
        Prefix for all built docker images.

        If you are pushing to gcr.io, this would start with:
            gcr.io/<your-project-name>/

        Set according to whatever registry you are pushing to.

        Defaults to "", which is probably not what you want :)
        """,
                           config=True)

    build_memory_request = ByteSpecification(
        0,
        help="""
        Amount of memory to request when scheduling a build

        0 reserves no memory.

        This is used as the request for the pod that is spawned to do the building,
        even though the pod itself will not be using that much memory
        since the docker building is happening outside the pod.
        However, it makes kubernetes aware of the resources being used,
        and lets it schedule more intelligently.
        """,
        config=True,
    )
    build_memory_limit = ByteSpecification(
        0,
        help="""
        Max amount of memory allocated for each image build process.

        0 sets no limit.

        This is applied to the docker build itself via repo2docker,
        though it is also applied to our pod that submits the build,
        even though that pod will rarely consume much memory.
        Still, it makes it easier to see the resource limits in place via kubernetes.
        """,
        config=True,
    )

    debug = Bool(False,
                 help="""
        Turn on debugging.
        """,
                 config=True)

    build_docker_host = Unicode("/var/run/docker.sock",
                                config=True,
                                help="""
        The docker URL repo2docker should use to build the images.

        Currently, only paths are supported, and they are expected to be available on
        all the hosts.
        """)

    @validate('build_docker_host')
    def docker_build_host_validate(self, proposal):
        parts = urlparse(proposal.value)
        if parts.scheme != 'unix' or parts.netloc != '':
            raise TraitError(
                "Only unix domain sockets on same node are supported for build_docker_host"
            )
        return proposal.value

    build_docker_config = Dict(None,
                               allow_none=True,
                               help="""
        A dict which will be merged into the .docker/config.json of the build container (repo2docker).
        Here, you could for example pass proxy settings as described here:
        https://docs.docker.com/network/proxy/#configure-the-docker-client

        Note: if you provide your own push_secret, these values won't
        have an effect, as the push_secret will overwrite
        .docker/config.json. In this case, make sure that you include
        your config in your push_secret.
        """,
                               config=True)

    hub_api_token = Unicode(
        help="""API token for talking to the JupyterHub API""",
        config=True,
    )

    @default('hub_api_token')
    def _default_hub_token(self):
        return os.environ.get('JUPYTERHUB_API_TOKEN', '')

    hub_url = Unicode(
        help="""
        The base URL of the JupyterHub instance where users will run.

        e.g. https://hub.mybinder.org/
        """,
        config=True,
    )

    hub_url_local = Unicode(
        help="""
        The base URL of the JupyterHub instance for local/internal traffic

        Set this if local/internal network connections from the BinderHub process
        should access JupyterHub using a different URL than public/external traffic.
        Defaults to hub_url.
        """,
        config=True,
    )

    @default('hub_url_local')
    def _default_hub_url_local(self):
        return self.hub_url

    @validate('hub_url', 'hub_url_local')
    def _add_slash(self, proposal):
        """trait validator to ensure hub_url ends with a trailing slash"""
        if proposal.value is not None and not proposal.value.endswith('/'):
            return proposal.value + '/'
        return proposal.value

    build_namespace = Unicode(help="""
        Kubernetes namespace to spawn build pods in.

        Note that the push_secret must refer to a secret in this namespace.
        """,
                              config=True)

    @default('build_namespace')
    def _default_build_namespace(self):
        return os.environ.get('BUILD_NAMESPACE', 'default')

    build_image = Unicode('quay.io/jupyterhub/repo2docker:2021.08.0',
                          help="""
        The repo2docker image to be used for doing builds
        """,
                          config=True)

    build_node_selector = Dict({},
                               config=True,
                               help="""
        Select the node that build pods run on.
        """)

    repo_providers = Dict(
        {
            'gh': GitHubRepoProvider,
            'gist': GistRepoProvider,
            'git': GitRepoProvider,
            'gl': GitLabRepoProvider,
            'zenodo': ZenodoProvider,
            'figshare': FigshareProvider,
            'hydroshare': HydroshareProvider,
            'dataverse': DataverseProvider,
        },
        config=True,
        help="""
        Repo providers to register and try, keyed by URL prefix (e.g. 'gh').
        """)

    @validate('repo_providers')
    def _validate_repo_providers(self, proposal):
        """trait validator to ensure there is at least one repo provider"""
        if not proposal.value:
            raise TraitError("Please provide at least one repo provider")

        if any([
                not issubclass(provider, RepoProvider)
                for provider in proposal.value.values()
        ]):
            raise TraitError(
                "Repository providers should inherit from 'binderhub.RepoProvider'"
            )

        return proposal.value

    concurrent_build_limit = Integer(
        32, config=True, help="""The number of concurrent builds to allow.""")
    executor_threads = Integer(
        5,
        config=True,
        help="""The number of threads to use for blocking calls

        Should generally be a small number because we don't
        care about high concurrency here, just not blocking the webserver.
        This executor is not used for long-running tasks (e.g. builds).
        """,
    )
    build_cleanup_interval = Integer(
        60,
        config=True,
        help=
        """Interval (in seconds) for how often stopped build pods will be deleted."""
    )
    build_max_age = Integer(3600 * 4,
                            config=True,
                            help="""Maximum age of builds

        Builds that are still running longer than this
        will be killed.
        """)

    build_token_check_origin = Bool(
        True,
        config=True,
        help="""Whether to validate build token origin.

        False disables the origin check.
        """)

    build_token_expires_seconds = Integer(
        300,
        config=True,
        help="""Expiry (in seconds) of build tokens

        These are generally only used to authenticate a single request
        from a page, so should be short-lived.
        """,
    )

    build_token_secret = Union(
        [Unicode(), Bytes()],
        config=True,
        help="""Secret used to sign build tokens

        Lightweight validation of same-origin requests
        """,
    )

    @validate("build_token_secret")
    def _validate_build_token_secret(self, proposal):
        if isinstance(proposal.value, str):
            # allow hex string for text-only input formats
            return a2b_hex(proposal.value)
        return proposal.value

    @default("build_token_secret")
    def _default_build_token_secret(self):
        if os.environ.get("BINDERHUB_BUILD_TOKEN_SECRET"):
            return a2b_hex(os.environ["BINDERHUB_BUILD_TOKEN_SECRET"])
        app_log.warning(
            "Generating random build token secret."
            " Set BinderHub.build_token_secret to avoid this warning.")
        return secrets.token_bytes(32)

    # FIXME: Come up with a better name for it?
    builder_required = Bool(True,
                            config=True,
                            help="""
        Whether a working build infrastructure is required.

        Build infrastructure is kubernetes cluster + docker. Set this to False to let
        binderhub run without it, which is useful for pure HTML/CSS/JS local development.
        """)

    ban_networks = Dict(
        config=True,
        help="""
        Dict of networks from which requests should be rejected with 403

        Keys are CIDR notation (e.g. '1.2.3.4/32'),
        values are a label used in log / error messages.
        CIDR strings will be parsed with `ipaddress.ip_network()`.
        """,
    )

    @validate("ban_networks")
    def _cast_ban_networks(self, proposal):
        """Cast CIDR strings to IPv[4|6]Network objects"""
        networks = {}
        for cidr, message in proposal.value.items():
            networks[ipaddress.ip_network(cidr)] = message

        return networks

    ban_networks_min_prefix_len = Integer(
        1,
        help="The shortest prefix in ban_networks",
    )

    @observe("ban_networks")
    def _update_prefix_len(self, change):
        if not change.new:
            min_len = 1
        else:
            min_len = min(net.prefixlen for net in change.new)
        self.ban_networks_min_prefix_len = min_len or 1

    tornado_settings = Dict(config=True,
                            help="""
        Additional settings to pass through to tornado.

        Can include things like additional headers, etc.
        """)

    template_variables = Dict(
        config=True,
        help="Extra variables to supply to jinja templates when rendering.",
    )

    template_path = Unicode(
        help=
        "Path to search for custom jinja templates, before using the default templates.",
        config=True,
    )

    @default('template_path')
    def _template_path_default(self):
        return os.path.join(HERE, 'templates')

    extra_static_path = Unicode(
        help='Path to search for extra static files.',
        config=True,
    )

    extra_static_url_prefix = Unicode(
        '/extra_static/',
        help='Url prefix to serve extra static files.',
        config=True,
    )

    normalized_origin = Unicode(
        '',
        config=True,
        help=
        'Origin to use when emitting events. Defaults to hostname of request when empty'
    )

    allowed_metrics_ips = Set(
        help=
        'Set of IPs or networks allowed to GET /metrics. Defaults to all.',
        config=True)

    @staticmethod
    def add_url_prefix(prefix, handlers):
        """add a url prefix to handlers"""
        for i, tup in enumerate(handlers):
            lis = list(tup)
            lis[0] = url_path_join(prefix, tup[0])
            handlers[i] = tuple(lis)
        return handlers

    def init_pycurl(self):
        try:
            AsyncHTTPClient.configure(
                "tornado.curl_httpclient.CurlAsyncHTTPClient")
        except ImportError as e:
            self.log.debug(
                "Could not load pycurl: %s\npycurl is recommended if you have a large number of users.",
                e)
        # set max verbosity of curl_httpclient at INFO
        # because debug-logging from curl_httpclient
        # includes every full request and response
        if self.log_level < logging.INFO:
            curl_log = logging.getLogger('tornado.curl_httpclient')
            curl_log.setLevel(logging.INFO)

    def initialize(self, *args, **kwargs):
        """Load configuration settings."""
        super().initialize(*args, **kwargs)
        self.load_config_file(self.config_file)
        # hook up tornado logging
        if self.debug:
            self.log_level = logging.DEBUG
        tornado.options.options.logging = logging.getLevelName(self.log_level)
        tornado.log.enable_pretty_logging()
        self.log = tornado.log.app_log

        self.init_pycurl()

        # initialize kubernetes config
        if self.builder_required:
            try:
                kubernetes.config.load_incluster_config()
            except kubernetes.config.ConfigException:
                kubernetes.config.load_kube_config()
            self.tornado_settings[
                "kubernetes_client"] = self.kube_client = kubernetes.client.CoreV1Api(
                )

        # times 2 for log + build threads
        self.build_pool = ThreadPoolExecutor(self.concurrent_build_limit * 2)
        # default executor for asyncifying blocking calls (e.g. to kubernetes, docker).
        # this should not be used for long-running requests
        self.executor = ThreadPoolExecutor(self.executor_threads)

        jinja_options = dict(autoescape=True)
        template_paths = [self.template_path]
        base_template_path = self._template_path_default()
        if base_template_path not in template_paths:
            # add base templates to the end, so they are looked up last, after custom templates
            template_paths.append(base_template_path)
        loader = ChoiceLoader([
            # first load base templates with prefix
            PrefixLoader({'templates': FileSystemLoader([base_template_path])},
                         '/'),
            # load all templates
            FileSystemLoader(template_paths)
        ])
        jinja_env = Environment(loader=loader, **jinja_options)
        if self.use_registry:
            registry = self.registry_class(parent=self)
        else:
            registry = None

        self.launcher = Launcher(
            parent=self,
            hub_url=self.hub_url,
            hub_url_local=self.hub_url_local,
            hub_api_token=self.hub_api_token,
            create_user=not self.auth_enabled,
        )

        self.event_log = EventLog(parent=self)

        for schema_file in glob(os.path.join(HERE, 'event-schemas', '*.json')):
            with open(schema_file) as f:
                self.event_log.register_schema(json.load(f))

        self.tornado_settings.update({
            "log_function":
            log_request,
            "push_secret":
            self.push_secret,
            "image_prefix":
            self.image_prefix,
            "debug":
            self.debug,
            "hub_url":
            self.hub_url,
            "launcher":
            self.launcher,
            "appendix":
            self.appendix,
            "ban_networks":
            self.ban_networks,
            "ban_networks_min_prefix_len":
            self.ban_networks_min_prefix_len,
            "build_namespace":
            self.build_namespace,
            "build_image":
            self.build_image,
            "build_node_selector":
            self.build_node_selector,
            "build_pool":
            self.build_pool,
            "build_token_check_origin":
            self.build_token_check_origin,
            "build_token_secret":
            self.build_token_secret,
            "build_token_expires_seconds":
            self.build_token_expires_seconds,
            "sticky_builds":
            self.sticky_builds,
            "log_tail_lines":
            self.log_tail_lines,
            "pod_quota":
            self.pod_quota,
            "per_repo_quota":
            self.per_repo_quota,
            "per_repo_quota_higher":
            self.per_repo_quota_higher,
            "repo_providers":
            self.repo_providers,
            "rate_limiter":
            RateLimiter(parent=self),
            "use_registry":
            self.use_registry,
            "build_class":
            self.build_class,
            "registry":
            registry,
            "traitlets_config":
            self.config,
            "google_analytics_code":
            self.google_analytics_code,
            "google_analytics_domain":
            self.google_analytics_domain,
            "about_message":
            self.about_message,
            "banner_message":
            self.banner_message,
            "extra_footer_scripts":
            self.extra_footer_scripts,
            "jinja2_env":
            jinja_env,
            "build_memory_limit":
            self.build_memory_limit,
            "build_memory_request":
            self.build_memory_request,
            "build_docker_host":
            self.build_docker_host,
            "build_docker_config":
            self.build_docker_config,
            "base_url":
            self.base_url,
            "badge_base_url":
            self.badge_base_url,
            "static_path":
            os.path.join(HERE, "static"),
            "static_url_prefix":
            url_path_join(self.base_url, "static/"),
            "template_variables":
            self.template_variables,
            "executor":
            self.executor,
            "auth_enabled":
            self.auth_enabled,
            "event_log":
            self.event_log,
            "normalized_origin":
            self.normalized_origin,
            "allowed_metrics_ips":
            set(map(ipaddress.ip_network, self.allowed_metrics_ips))
        })
        if self.auth_enabled:
            self.tornado_settings['cookie_secret'] = os.urandom(32)
        if self.cors_allow_origin:
            self.tornado_settings.setdefault(
                'headers',
                {})['Access-Control-Allow-Origin'] = self.cors_allow_origin

        handlers = [
            (r'/metrics', MetricsHandler),
            (r'/versions', VersionHandler),
            (r"/build/([^/]+)/(.+)", BuildHandler),
            (r"/v2/([^/]+)/(.+)", ParameterizedMainHandler),
            (r"/repo/([^/]+)/([^/]+)(/.*)?", LegacyRedirectHandler),
            (r'/~([^/]+/.*)', UserRedirectHandler),
            # for backward-compatible mybinder.org badge URLs
            # /assets/images/badge.svg
            (r'/assets/(images/badge\.svg)', tornado.web.StaticFileHandler, {
                'path': self.tornado_settings['static_path']
            }),
            # /badge.svg
            (r'/(badge\.svg)', tornado.web.StaticFileHandler, {
                'path':
                os.path.join(self.tornado_settings['static_path'], 'images')
            }),
            # /badge_logo.svg
            (r'/(badge\_logo\.svg)', tornado.web.StaticFileHandler, {
                'path':
                os.path.join(self.tornado_settings['static_path'], 'images')
            }),
            # /logo_social.png
            (r'/(logo\_social\.png)', tornado.web.StaticFileHandler, {
                'path':
                os.path.join(self.tornado_settings['static_path'], 'images')
            }),
            # /favicon_XXX.ico
            (r'/(favicon\_fail\.ico)', tornado.web.StaticFileHandler, {
                'path':
                os.path.join(self.tornado_settings['static_path'], 'images')
            }),
            (r'/(favicon\_success\.ico)', tornado.web.StaticFileHandler, {
                'path':
                os.path.join(self.tornado_settings['static_path'], 'images')
            }),
            (r'/(favicon\_building\.ico)', tornado.web.StaticFileHandler, {
                'path':
                os.path.join(self.tornado_settings['static_path'], 'images')
            }),
            (r'/about', AboutHandler),
            (r'/health', HealthHandler, {
                'hub_url': self.hub_url_local
            }),
            (r'/_config', ConfigHandler),
            (r'/', MainHandler),
            (r'.*', Custom404),
        ]
        handlers = self.add_url_prefix(self.base_url, handlers)
        if self.extra_static_path:
            handlers.insert(-1, (re.escape(
                url_path_join(self.base_url, self.extra_static_url_prefix)) +
                                 r"(.*)", tornado.web.StaticFileHandler, {
                                     'path': self.extra_static_path
                                 }))
        if self.auth_enabled:
            oauth_redirect_uri = os.getenv('JUPYTERHUB_OAUTH_CALLBACK_URL') or \
                                 url_path_join(self.base_url, 'oauth_callback')
            oauth_redirect_uri = urlparse(oauth_redirect_uri).path
            handlers.insert(
                -1, (re.escape(oauth_redirect_uri), HubOAuthCallbackHandler))
        self.tornado_app = tornado.web.Application(handlers,
                                                   **self.tornado_settings)

    def stop(self):
        self.http_server.stop()
        self.build_pool.shutdown()

    async def watch_build_pods(self):
        """Watch build pods

        Every build_cleanup_interval:
        - delete stopped build pods
        - delete running build pods older than build_max_age
        """
        while True:
            try:
                await asyncio.wrap_future(
                    self.executor.submit(lambda: Build.cleanup_builds(
                        self.kube_client,
                        self.build_namespace,
                        self.build_max_age,
                    )))
            except Exception:
                app_log.exception("Failed to cleanup build pods")
            await asyncio.sleep(self.build_cleanup_interval)

    def start(self, run_loop=True):
        self.log.info("BinderHub starting on port %i", self.port)
        self.http_server = HTTPServer(
            self.tornado_app,
            xheaders=True,
        )
        self.http_server.listen(self.port)
        if self.builder_required:
            asyncio.ensure_future(self.watch_build_pods())
        if run_loop:
            tornado.ioloop.IOLoop.current().start()
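
Every trait above is declared with config=True, so it can be set from a standard traitlets config file that initialize() loads via load_config_file. A minimal sketch of such a binderhub_config.py follows; every value in it is a placeholder assumption, not a recommendation:

# binderhub_config.py -- illustrative placeholder values only
c.BinderHub.image_prefix = "gcr.io/my-project/binder-"  # hypothetical registry path
c.BinderHub.per_repo_quota = 100                        # cap concurrent users per repo
c.BinderHub.build_memory_limit = "2G"                   # ByteSpecification accepts K/M/G/T suffixes
c.BinderHub.hub_url = "https://hub.example.org"         # _add_slash appends the trailing '/'
c.BinderHub.build_namespace = "binder-builds"           # push_secret must exist in this namespace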
Example #23
class MultiTupleTrait(HasTraits):

    value = Tuple(Int(), Bytes(), default_value=[99, b'bottles'])
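
A short usage sketch of the per-position validation this trait declares (assuming traitlets is importable):

from traitlets import TraitError

t = MultiTupleTrait()
print(t.value)             # (99, b'bottles') -- the list default is coerced to a tuple
t.value = (1, b'one')      # accepted: Int() then Bytes(), checked position by position
try:
    t.value = (b'one', 1)  # wrong order: each position rejects the other type
except TraitError:
    print('rejected, as expected')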
Example #24
class Comm(LoggingConfigurable):
    """Class for communicating between a Frontend and a Kernel"""
    kernel = Instance('ipykernel.kernelbase.Kernel', allow_none=True)

    @default('kernel')
    def _default_kernel(self):
        if Kernel.initialized():
            return Kernel.instance()

    comm_id = Unicode()

    @default('comm_id')
    def _default_comm_id(self):
        return uuid.uuid4().hex

    primary = Bool(True, help="Am I the primary or secondary Comm?")

    target_name = Unicode('comm')
    target_module = Unicode(None,
                            allow_none=True,
                            help="""requirejs module from
        which to load comm target.""")

    topic = Bytes()

    @default('topic')
    def _default_topic(self):
        return ('comm-%s' % self.comm_id).encode('ascii')

    _open_data = Dict(help="data dict, if any, to be included in comm_open")
    _close_data = Dict(help="data dict, if any, to be included in comm_close")

    _msg_callback = Any()
    _close_callback = Any()

    _closed = Bool(True)

    def __init__(self,
                 target_name='',
                 data=None,
                 metadata=None,
                 buffers=None,
                 **kwargs):
        if target_name:
            kwargs['target_name'] = target_name
        super(Comm, self).__init__(**kwargs)
        if self.kernel:
            if self.primary:
                # I am primary, open my peer.
                self.open(data=data, metadata=metadata, buffers=buffers)
            else:
                self._closed = False

    def _publish_msg(self,
                     msg_type,
                     data=None,
                     metadata=None,
                     buffers=None,
                     **keys):
        """Helper for sending a comm message on IOPub"""
        data = {} if data is None else data
        metadata = {} if metadata is None else metadata
        content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))
        self.kernel.session.send(
            self.kernel.iopub_socket,
            msg_type,
            content,
            metadata=json_clean(metadata),
            parent=self.kernel.get_parent("shell"),
            ident=self.topic,
            buffers=buffers,
        )

    def __del__(self):
        """trigger close on gc"""
        self.close(deleting=True)

    # publishing messages

    def open(self, data=None, metadata=None, buffers=None):
        """Open the frontend-side version of this comm"""
        if data is None:
            data = self._open_data
        comm_manager = getattr(self.kernel, 'comm_manager', None)
        if comm_manager is None:
            raise RuntimeError("Comms cannot be opened without a kernel "
                               "and a comm_manager attached to that kernel.")

        comm_manager.register_comm(self)
        try:
            self._publish_msg(
                'comm_open',
                data=data,
                metadata=metadata,
                buffers=buffers,
                target_name=self.target_name,
                target_module=self.target_module,
            )
            self._closed = False
        except Exception:
            comm_manager.unregister_comm(self)
            raise

    def close(self, data=None, metadata=None, buffers=None, deleting=False):
        """Close the frontend-side version of this comm"""
        if self._closed:
            # only close once
            return
        self._closed = True
        # nothing to send if we have no kernel
        # can be None during interpreter cleanup
        if not self.kernel:
            return
        if data is None:
            data = self._close_data
        self._publish_msg(
            'comm_close',
            data=data,
            metadata=metadata,
            buffers=buffers,
        )
        if not deleting:
            # If deleting, the comm can't be registered
            self.kernel.comm_manager.unregister_comm(self)

    def send(self, data=None, metadata=None, buffers=None):
        """Send a message to the frontend-side version of this comm"""
        self._publish_msg(
            'comm_msg',
            data=data,
            metadata=metadata,
            buffers=buffers,
        )

    # registering callbacks

    def on_close(self, callback):
        """Register a callback for comm_close

        Will be called with the `data` of the close message.

        Call `on_close(None)` to disable an existing callback.
        """
        self._close_callback = callback

    def on_msg(self, callback):
        """Register a callback for comm_msg

        Will be called with the `data` of any comm_msg messages.

        Call `on_msg(None)` to disable an existing callback.
        """
        self._msg_callback = callback

    # handling of incoming messages

    def handle_close(self, msg):
        """Handle a comm_close message"""
        self.log.debug("handle_close[%s](%s)", self.comm_id, msg)
        if self._close_callback:
            self._close_callback(msg)

    def handle_msg(self, msg):
        """Handle a comm_msg message"""
        self.log.debug("handle_msg[%s](%s)", self.comm_id, msg)
        if self._msg_callback:
            shell = self.kernel.shell
            if shell:
                shell.events.trigger('pre_execute')
            self._msg_callback(msg)
            if shell:
                shell.events.trigger('post_execute')
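
A minimal usage sketch of the Comm above. It only runs inside a live IPython kernel whose frontend has registered the (hypothetical) target name 'example_target'; without a kernel and comm_manager, open() raises RuntimeError:

comm = Comm(target_name='example_target', data={'greeting': 'hello'})

def handle(msg):
    # msg is a deserialized Jupyter message; the payload sits in content.data
    print(msg['content']['data'])

comm.on_msg(handle)           # register a comm_msg callback
comm.send({'progress': 0.5})  # publish a comm_msg to the frontend
comm.close()                  # publish comm_close and unregister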
Example #25
class BytesTrait(HasTraits):

    value = Bytes(b'string')
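
A short sketch of what Bytes() accepts and rejects (assuming traitlets is importable; unlike the casting CBytes variant, Bytes does not coerce str):

from traitlets import TraitError

b = BytesTrait()
assert b.value == b'string'  # the default declared above
b.value = b'other'           # bytes are accepted as-is
try:
    b.value = 'text'         # a str is rejected rather than encoded
except TraitError:
    print('str rejected, as expected')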
Example #26
class PVDisplay(widgets.DOMWidget):
    """A ParaView interactive render widget"""
    _view_name = Unicode('PVDisplayView').tag(sync=True)
    _model_name = Unicode('PVDisplayModel').tag(sync=True)
    _view_module = Unicode('ipyparaview').tag(sync=True)
    _model_module = Unicode('ipyparaview').tag(sync=True)
    _view_module_version = Unicode('^0.1.0').tag(sync=True)
    _model_module_version = Unicode('^0.1.0').tag(sync=True)

    # traitlets -- variables synchronized with front end
    frame = Bytes().tag(sync=True)
    resolution = Tuple((800, 500)).tag(sync=True)  #canvas resolution; w,h
    camf = Tuple((0, 0, 0)).tag(sync=True)
    camp = Tuple((1, 1, 1)).tag(sync=True)
    camu = Tuple((0, 1, 0)).tag(sync=True)  #TODO: interactively set this
    fpsLimit = Float(60.0).tag(sync=True)  #maximum render rate

    # regular vars
    pvs, renV, w2i = None, None, None  #used for Jupyter kernel rendering
    master, renderers = None, []  #used for Dask rendering
    mode = 'Jupyter'
    tp = time.time()  #time of latest render
    fps = 10.0
    fpsOut = []  #FPS output ipywidgets; passed in from Jupyter
    intyld = [0.05, 0.01]  #interaction yield--period and duration
    tiy = time.time()  #time of last interaction yield

    def __init__(self, ren, runAsync=True, **kwargs):
        super(PVDisplay, self).__init__(**kwargs)  #must call super class init

        import numpy as np

        # see if we can import Dask.distributed, then try guessing the render
        # mode based on the type of ren. Fallback to regular Jupyter rendering
        # otherwise
        try:
            import dask.distributed as distributed
            if (type(ren) == list and type(ren[0]) == distributed.actor.Actor):
                self.mode = 'Dask'
            else:
                self.mode = 'Jupyter'
        except ImportError:
            self.mode = 'Jupyter'

        if self.mode == 'Dask':
            self.renderers = ren
            self.master = [r for r in self.renderers if r.rank == 0][0]
            self.resolution = tuple(
                self.master.run(lambda self: list(self.renV.ViewSize),
                                []).result())
            cf = self.master.run(lambda self: list(self.renV.CameraFocalPoint),
                                 []).result()
            cp = self.master.run(lambda self: list(self.renV.CameraPosition),
                                 []).result()
            self.camf = (cf[0], cf[1], cf[2])
            self.camp = (cp[0], cp[1], cp[2])
        else:
            import paraview.simple as pvs
            self.pvs = pvs
            self.renV = ren
            self.resolution = tuple(self.renV.ViewSize)

            cf = self.renV.CameraFocalPoint
            cp = self.renV.CameraPosition
            self.camf = (cf[0], cf[1], cf[2])
            self.camp = (cp[0], cp[1], cp[2])

            import vtk
            from vtk import vtkWindowToImageFilter
            self.w2i = vtkWindowToImageFilter()
            self.w2i.ReadFrontBufferOff()
            self.w2i.ShouldRerenderOff()
            self.w2i.SetInput(self.renV.SMProxy.GetRenderWindow())

        self.frameNum = 0
        self.FRBufSz = 10
        self.FRBuf = np.zeros(self.FRBufSz, dtype=np.float32)

        self.runAsync = runAsync
        if runAsync:
            self.renderThread = threading.Thread(target=self.__renderLoop)
            self.renderThread.start()

    #FIXME: starting the render loop thread outside of __init__ seems to create
    # a copy of the paraview.simple object, rather than using the one that's
    # part of the PVDisplay state; this causes PV to crash
    #def setAsync(self, on):
    #    if on and not self.runAsync:
    #        self.runAsync = on
    #        self.renderThread = threading.Thread(target=self.__renderLoop)
    #        self.renderThread.start()
    #    elif not on and self.runAsync:
    #        self.runAsync = False

    def addFPSDisplay(self, *w):
        """Add a widget to write FPS to"""
        for o in w:
            self.fpsOut.append(o)

    def updateCam(self):
        self.render()

    def render(self):
        if self.runAsync:
            return
        else:
            tc = time.time()
            if (1.0 / (tc - self.tp) < self.fpsLimit):
                self.__renderFrame()

    def fetchFrame(self):
        if self.mode == 'Dask':
            return self.master.fetchFrame().result()
        else:
            # Mathias's magic frame fetching snippet
            self.w2i.Modified()
            self.w2i.Update()
            imagedata = self.w2i.GetOutput()
            w, h, _ = imagedata.GetDimensions()
            import numpy as np
            from vtk.util.numpy_support import vtk_to_numpy
            imagedata_np = vtk_to_numpy(
                imagedata.GetPointData().GetScalars()).reshape((h, w, 3))
            return np.flipud(
                np.pad(imagedata_np, ((0, 0), (0, 0), (0, 1)),
                       mode='constant',
                       constant_values=255))

    def _handle_custom_msg(self, content, buffers):
        if content.get('event', '') == 'updateCam':
            self.updateCam()

    def __renderFrame(self):
        import numpy as np

        tc = time.time()
        self.FRBuf[self.frameNum % self.FRBufSz] = 1.0 / (tc - self.tp)
        self.tp = tc

        #set the camera position, render, and get the output frame
        if self.mode == 'Dask':
            from dask.distributed import wait
            wait([r.render(self.camp, self.camf) for r in self.renderers])
        else:
            self.renV.CenterOfRotation = self.renV.CameraFocalPoint = self.camf
            self.renV.CameraPosition = self.camp
            self.pvs.Render(view=self.renV)
        # tostring() was removed in newer NumPy; tobytes() is the equivalent call
        self.frame = self.fetchFrame().tobytes()
        self.frameNum += 1
        self.fps = np.average(self.FRBuf)
        if self.fpsOut is not None:
            for fo in self.fpsOut:
                fo.value = self.fps

    def __renderLoop(self):
        while self.runAsync:
            #check if it's time for an interaction yield; if so, do it
            if time.time() - self.tiy > self.intyld[0]:
                time.sleep(self.intyld[1])
                self.tiy = time.time()

            #sleep to keep FPS to fpsLimit
            time.sleep(max(0, 1.0 / self.fpsLimit - (time.time() - self.tp)))

            self.__renderFrame()
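
A usage sketch for the single-process 'Jupyter' mode (it assumes a live ParaView Python environment plus a notebook frontend with the ipyparaview extension; the scene itself is an arbitrary example):

import paraview.simple as pvs

# build a trivial scene and grab the active render view
sphere = pvs.Sphere()
renV = pvs.GetActiveViewOrCreate('RenderView')
pvs.Show(sphere, renV)
pvs.Render(renV)

disp = PVDisplay(renV, runAsync=False)  # render on demand instead of in a background thread
disp  # the last expression in a notebook cell displays the widget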
Example #27
class BroadcastScheduler(Scheduler):
    port_name = 'broadcast'
    accumulated_replies = {}
    accumulated_targets = {}
    is_leaf = Bool(False)
    connected_sub_scheduler_ids = List(Bytes())
    outgoing_streams = List()
    depth = Integer()
    max_depth = Integer()
    name = Unicode()

    def start(self):
        self.client_stream.on_recv(self.dispatch_submission, copy=False)
        if self.is_leaf:
            super().start()
        else:
            for outgoing_stream in self.outgoing_streams:
                outgoing_stream.on_recv(self.dispatch_result, copy=False)
        self.log.info(f"BroadcastScheduler {self.name} started")

    def send_to_targets(self, msg, original_msg_id, targets, idents,
                        is_coalescing):
        if is_coalescing:
            self.accumulated_replies[original_msg_id] = {
                target.encode('utf8'): None
                for target in targets
            }
            self.accumulated_targets[original_msg_id] = targets

        for target in targets:
            new_msg = self.append_new_msg_id_to_msg(
                self.get_new_msg_id(original_msg_id, target), target, idents,
                msg)
            self.engine_stream.send_multipart(new_msg, copy=False)

    def send_to_sub_schedulers(self, msg, original_msg_id, targets, idents,
                               is_coalescing):
        trunc = 2**self.max_depth
        fmt = f"0{self.max_depth + 1}b"

        # assign targets to sub-schedulers based on binary path
        # compute binary '010110' representation of the engine id
        targets_by_scheduler = [
            [] for i in range(len(self.connected_sub_scheduler_ids))
        ]
        for target_tuple in targets:
            path = format(target_tuple[1] % trunc, fmt)
            next_idx = int(path[self.depth + 1])  # 0 or 1
            targets_by_scheduler[next_idx].append(target_tuple)

        if is_coalescing:
            self.accumulated_replies[original_msg_id] = {
                scheduler_id: None
                for scheduler_id in self.connected_sub_scheduler_ids
            }
            self.accumulated_targets[original_msg_id] = {}

        for i, scheduler_id in enumerate(self.connected_sub_scheduler_ids):
            targets_for_scheduler = targets_by_scheduler[i]
            if is_coalescing:
                if targets_for_scheduler:
                    self.accumulated_targets[original_msg_id][
                        scheduler_id] = targets_for_scheduler
                else:
                    del self.accumulated_replies[original_msg_id][scheduler_id]
            msg['metadata']['targets'] = targets_for_scheduler

            new_msg = self.append_new_msg_id_to_msg(
                self.get_new_msg_id(original_msg_id, scheduler_id),
                scheduler_id,
                idents,
                msg,
            )
            self.outgoing_streams[i].send_multipart(new_msg, copy=False)

    def coalescing_reply(self, raw_msg, msg, original_msg_id, outgoing_id,
                         idents):
        # accumulate buffers
        self.accumulated_replies[original_msg_id][outgoing_id] = msg['buffers']
        if all(msg_buffers is not None for msg_buffers in
               self.accumulated_replies[original_msg_id].values()):
            replies = self.accumulated_replies.pop(original_msg_id)
            self.log.debug(
                f"Coalescing {len(replies)} reply to {original_msg_id}")
            targets = self.accumulated_targets.pop(original_msg_id)

            new_msg = msg.copy()
            # begin rebuilding message
            # metadata['targets']
            if self.is_leaf:
                new_msg['metadata']['broadcast_targets'] = targets
            else:
                new_msg['metadata']['broadcast_targets'] = []

            # avoid duplicated msg buffers
            buffers = []
            for sub_target, msg_buffers in replies.items():
                buffers.extend(msg_buffers)
                if not self.is_leaf:
                    new_msg['metadata']['broadcast_targets'].extend(
                        targets[sub_target])

            new_raw_msg = self.session.serialize(new_msg)
            self.client_stream.send_multipart(idents + new_raw_msg + buffers,
                                              copy=False)

    @util.log_errors
    def dispatch_submission(self, raw_msg):
        try:
            idents, msg_list = self.session.feed_identities(raw_msg,
                                                            copy=False)
            msg = self.session.deserialize(msg_list, content=False, copy=False)
        except Exception:
            self.log.error(f'broadcast::Invalid broadcast msg: {raw_msg}',
                           exc_info=True)
            return
        metadata = msg['metadata']
        msg_id = msg['header']['msg_id']
        targets = metadata['targets']

        is_coalescing = metadata['is_coalescing']

        if 'original_msg_id' not in metadata:
            metadata['original_msg_id'] = msg_id

        original_msg_id = metadata['original_msg_id']
        if self.is_leaf:
            target_idents = [t[0] for t in targets]
            self.send_to_targets(msg, original_msg_id, target_idents, idents,
                                 is_coalescing)
        else:
            self.send_to_sub_schedulers(msg, original_msg_id, targets, idents,
                                        is_coalescing)

    @util.log_errors
    def dispatch_result(self, raw_msg):
        try:
            idents, msg = self.session.feed_identities(raw_msg, copy=False)
            msg = self.session.deserialize(msg, content=False, copy=False)
            outgoing_id = idents[0]

        except Exception:
            self.log.error(f'broadcast::Invalid broadcast msg: {raw_msg}',
                           exc_info=True)
            return
        original_msg_id = msg['metadata']['original_msg_id']
        is_coalescing = msg['metadata']['is_coalescing']
        if is_coalescing:
            self.coalescing_reply(raw_msg, msg, original_msg_id, outgoing_id,
                                  idents[1:])
        else:
            self.client_stream.send_multipart(raw_msg[1:], copy=False)
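
To make the binary-path routing in send_to_sub_schedulers easier to follow, here is a standalone sketch with assumed values (a tree with max_depth=2, viewed from the root scheduler at depth=0):

max_depth, depth = 2, 0                    # assumed tree shape
trunc = 2 ** max_depth
fmt = f"0{max_depth + 1}b"
for engine_id in range(8):
    path = format(engine_id % trunc, fmt)  # e.g. engine 6 -> '010'
    next_idx = int(path[depth + 1])        # this bit picks sub-scheduler 0 or 1
    print(f"engine {engine_id}: path {path} -> sub-scheduler {next_idx}")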