class States(HasTraits):
    active = Bool(default_value=True,
                  help='Denotes if agent is currently active')

    target_reached = Bool(default_value=False,
                          help='Denotes if agent has reached its target')

    # Navigation
    target = Int(
        default_value=NO_TARGET,
        min=NO_TARGET,
        help='Positive integer for target index, -1 for agents that do not '
        'have a target.')
    is_leader = Bool(default_value=False,
                     help='Boolean indicating if agent is leader')
    is_follower = Bool(
        default_value=False,
        help='Boolean indicating if agent is herding (following the average '
        'direction of other agents).')
    index_leader = Int(
        default_value=NO_LEADER,
        help='Index of the agent that is the leader of this agent.')
    familiar_exit = Int(default_value=NO_TARGET,
                        min=NO_TARGET,
                        help='Target that is familiar to a follower agent.')
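
A minimal usage sketch, assuming the States class above plus its NO_TARGET and NO_LEADER sentinels (presumably -1) are in scope; it shows the declared defaults and the min bound in action:

from traitlets import TraitError

s = States()
print(s.active, s.target_reached, s.target)  # True False -1 -- the declared defaults
s.target = 3                                 # any int >= NO_TARGET (-1) is accepted
try:
    s.target = -5                            # below min=NO_TARGET, so traitlets rejects it
except TraitError as err:
    print('rejected:', err)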
Example #2
class MyApp(Application):

    name = Unicode(u'myapp')
    running = Bool(False, config=True,
                   help="Is the app running?")
    classes = List([Bar, Foo])
    config_file = Unicode(u'', config=True,
                   help="Load this config file")

    aliases = Dict({
                    'i' : 'Foo.i',
                    'j' : 'Foo.j',
                    'name' : 'Foo.name',
                    'enabled' : 'Bar.enabled',
                    'log-level' : 'Application.log_level',
                })
    
    flags = Dict(dict(enable=({'Bar': {'enabled' : True}}, "Set Bar.enabled to True"),
                  disable=({'Bar': {'enabled' : False}}, "Set Bar.enabled to False"),
                  crit=({'Application' : {'log_level' : logging.CRITICAL}},
                        "set level=CRITICAL"),
            ))
    
    def init_foo(self):
        self.foo = Foo(parent=self)

    def init_bar(self):
        self.bar = Bar(parent=self)
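
A hedged launch sketch for the MyApp class above (Foo and Bar are assumed to be Configurable classes defined elsewhere with the traits that the aliases and flags reference); it shows how those dicts surface as command-line options:

if __name__ == '__main__':
    # --enable expands to {'Bar': {'enabled': True}} through the flags dict,
    # --i=5 is shorthand for --Foo.i=5 through the aliases dict, and
    # --log-level=DEBUG maps to Application.log_level, e.g.:
    #     python myapp.py --enable --i=5 --log-level=DEBUG
    MyApp.launch_instance()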
Example #3
class MyApp(Application):

    name = Unicode(u'myapp')
    running = Bool(False, help="Is the app running?").tag(config=True)
    classes = List([Bar, Foo])
    config_file = Unicode(u'', help="Load this config file").tag(config=True)

    warn_tpyo = Unicode(u"yes the name is wrong on purpose", config=True,
            help="Should print a warning if `MyApp.warn-typo=...` command is passed")

    aliases = Dict({
                    'i' : 'Foo.i',
                    'j' : 'Foo.j',
                    'name' : 'Foo.name',
                    'enabled' : 'Bar.enabled',
                    'log-level' : 'Application.log_level',
                })
    
    flags = Dict(dict(enable=({'Bar': {'enabled' : True}}, "Set Bar.enabled to True"),
                  disable=({'Bar': {'enabled' : False}}, "Set Bar.enabled to False"),
                  crit=({'Application' : {'log_level' : logging.CRITICAL}},
                        "set level=CRITICAL"),
            ))
    
    def init_foo(self):
        self.foo = Foo(parent=self)

    def init_bar(self):
        self.bar = Bar(parent=self)
Example #4
class HdfConfig(Configurable):
    resolve_links = Bool(
        False,
        config=True,
        help="Whether soft and external links should be resolved when exploring HDF5 files.",
    )
Example #5
class Bar(Configurable):

    b = Integer(0, help="The integer b.").tag(config=True)
    enabled = Bool(True, help="Enable bar.").tag(config=True)
    tb = Tuple(()).tag(config=True, multiplicity='*')
    aset = Set().tag(config=True, multiplicity='+')
    bdict = Dict().tag(config=True)
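
A config-file sketch for the Bar class above: every trait tagged config=True can be assigned through the Config object that traitlets injects when the file is loaded (the file name and load call are assumptions):

# bar_config.py, loaded e.g. with app.load_config_file('bar_config.py', path='.')
c = get_config()  # get_config is provided in the PyFileConfigLoader namespace
c.Bar.b = 10
c.Bar.enabled = False
c.Bar.aset = {'x', 'y'}
c.Bar.bdict = {'key': 'value'}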
Example #6
class FirstUseAuthenticator(Authenticator):
    """
    JupyterHub authenticator that lets users set password on first use.
    """
    dbm_path = Unicode(
        'passwords.dbm',
        config=True,
        help="""
        Path to store the db file with username / pwd hash in
        """
    )

    create_users = Bool(
        True,
        config=True,
        help="""
        Create users if they do not exist already.

        When set to false, users would have to be explicitly created before
        they can log in. Users can be created via the admin panel or by setting
        whitelist / admin list.
        """
    )

    def _user_exists(self, username):
        """
        Return true if given user already exists.

        Note: Depends on internal details of JupyterHub that might change
        across versions. Tested with v0.9
        """
        return self.db.query(User).filter_by(name=username).first() is not None

    @gen.coroutine
    def authenticate(self, handler, data):
        username = data['username']

        if not self.create_users:
            if not self._user_exists(username):
                return None

        password = data['password']
        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            stored_pw = db.get(username.encode(), None)
            if stored_pw is not None:
                if bcrypt.hashpw(password.encode(), stored_pw) != stored_pw:
                    return None
            else:
                db[username] = bcrypt.hashpw(password.encode(), bcrypt.gensalt())
        return username

    def delete_user(self, user):
        """
        When user is deleted, remove their entry from password db.

        This lets passwords be reset by deleting users.
        """
        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            del db[user.name]
Example #7
class IllumiDeskKubeSpawner(KubeSpawner):
    """Extends the KubeSpawner by defining the common behavior for our Spwaners that work
    with LTI versions 1.1 and 1.3
    """

    load_shared_folder_with_instructor = Bool(
        True,
        config=True,
        help="Mount the shared folder with the Instructor role (used with the shared_folder_enabled env-var).",
    )
Example #8
class FigureManager(Configurable):
    """
    For a given Viewer, encapsulate the matplotlib Figures and associated tabs.
    """
    factories = List([
        LinePlotManager,
        LatestFrameImageManager],
        config=True)
    enabled = Bool(True, config=True)
    exclude_streams = Set([], config=True)

    def __init__(self, add_tab):
        self.update_config(load_config())
        self.add_tab = add_tab
        self._figures = {}

    def get_figure(self, key, label, *args, **kwargs):
        try:
            return self._figures[key]
        except KeyError:
            return self._add_figure(key, label, *args, **kwargs)

    def _add_figure(self, key, label, *args, **kwargs):
        tab = QWidget()
        fig, _ = plt.subplots(*args, **kwargs)
        canvas = FigureCanvas(fig)
        canvas.setMinimumWidth(640)
        canvas.setParent(tab)
        toolbar = NavigationToolbar(canvas, tab)
        tab_label = QLabel(label)
        tab_label.setMaximumHeight(20)

        layout = QVBoxLayout()
        layout.addWidget(tab_label)
        layout.addWidget(canvas)
        layout.addWidget(toolbar)
        tab.setLayout(layout)
        self.add_tab(tab, label)
        self._figures[key] = fig
        return fig

    def __call__(self, name, start_doc):
        if not self.enabled:
            return [], []
        dimensions = start_doc.get('hints', {}).get('dimensions', guess_dimensions(start_doc))
        rr = RunRouter(
            [factory(self, dimensions) for factory in self.factories])
        rr('start', start_doc)
        return [rr], []
Example #9
class Application(SingletonConfigurable):
    """A singleton application with full configuration support."""

    # The name of the application; it will usually match the name of the
    # command-line application
    name = Unicode(u'application')

    # The description of the application that is printed at the beginning
    # of the help.
    description = Unicode(u'This is an application.')
    # default section descriptions
    option_description = Unicode(option_description)
    keyvalue_description = Unicode(keyvalue_description)
    subcommand_description = Unicode(subcommand_description)

    python_config_loader_class = PyFileConfigLoader
    json_config_loader_class = JSONFileConfigLoader

    # The usage and example string that goes at the end of the help string.
    examples = Unicode()

    # A sequence of Configurable subclasses whose config=True attributes will
    # be exposed at the command line.
    classes = []

    def _classes_inc_parents(self):
        """Iterate through configurable classes, including configurable parents

        Children should always be after parents, and each class should only be
        yielded once.
        """
        seen = set()
        for c in self.classes:
            # We want to sort parents before children, so we reverse the MRO
            for parent in reversed(c.mro()):
                if issubclass(parent, Configurable) and (parent not in seen):
                    seen.add(parent)
                    yield parent

    # The version string of this application.
    version = Unicode(u'0.0')

    # the argv used to initialize the application
    argv = List()

    # Whether failing to load config files should prevent startup
    raise_config_file_errors = Bool(
        TRAITLETS_APPLICATION_RAISE_CONFIG_FILE_ERROR)

    # The log level for the application
    log_level = Enum(
        (0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'),
        default_value=logging.WARN,
        help="Set the log level by value or name.").tag(config=True)

    @observe('log_level')
    @observe_compat
    def _log_level_changed(self, change):
        """Adjust the log level when log_level is set."""
        new = change.new
        if isinstance(new, six.string_types):
            new = getattr(logging, new)
            self.log_level = new
        self.log.setLevel(new)

    _log_formatter_cls = LevelFormatter

    log_datefmt = Unicode(
        "%Y-%m-%d %H:%M:%S",
        help="The date format used by logging formatters for %(asctime)s").tag(
            config=True)

    log_format = Unicode(
        "[%(name)s]%(highlevel)s %(message)s",
        help="The Logging format template",
    ).tag(config=True)

    @observe('log_datefmt', 'log_format')
    @observe_compat
    def _log_format_changed(self, change):
        """Change the log formatter when log_format is set."""
        _log_handler = self.log.handlers[0]
        _log_formatter = self._log_formatter_cls(fmt=self.log_format,
                                                 datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)

    @default('log')
    def _log_default(self):
        """Start logging for this application.

        The default is to log to stderr using a StreamHandler, if no default
        handler already exists.  The log level starts at logging.WARN, but this
        can be adjusted by setting the ``log_level`` attribute.
        """
        log = logging.getLogger(self.__class__.__name__)
        log.setLevel(self.log_level)
        log.propagate = False
        _log = log  # copied from Logger.hasHandlers() (new in Python 3.2)
        while _log:
            if _log.handlers:
                return log
            if not _log.propagate:
                break
            else:
                _log = _log.parent
        if sys.executable.endswith('pythonw.exe'):
            # this should really go to a file, but file-logging is only
            # hooked up in parallel applications
            _log_handler = logging.StreamHandler(open(os.devnull, 'w'))
        else:
            _log_handler = logging.StreamHandler()
        _log_formatter = self._log_formatter_cls(fmt=self.log_format,
                                                 datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)
        log.addHandler(_log_handler)
        return log

    # the alias map for configurables
    aliases = Dict({'log-level': 'Application.log_level'})

    # flags for loading Configurables or store_const style flags
    # flags are loaded from this dict by '--key' flags
    # this must be a dict of two-tuples, the first element being the Config/dict
    # and the second being the help string for the flag
    flags = Dict()

    @observe('flags')
    @observe_compat
    def _flags_changed(self, change):
        """ensure flags dict is valid"""
        new = change.new
        for key, value in new.items():
            assert len(value) == 2, "Bad flag: %r:%s" % (key, value)
            assert isinstance(value[0],
                              (dict, Config)), "Bad flag: %r:%s" % (key, value)
            assert isinstance(
                value[1], six.string_types), "Bad flag: %r:%s" % (key, value)

    # subcommands for launching other applications
    # if this is not empty, this will be a parent Application
    # this must be a dict of two-tuples,
    # the first element being the application class/import string
    # and the second being the help string for the subcommand
    subcommands = Dict()
    # parse_command_line will initialize a subapp, if requested
    subapp = Instance('traitlets.config.application.Application',
                      allow_none=True)

    # extra command-line arguments that don't set config values
    extra_args = List(Unicode())

    def __init__(self, **kwargs):
        SingletonConfigurable.__init__(self, **kwargs)
        # Ensure my class is in self.classes, so my attributes appear in command line
        # options and config files.
        if self.__class__ not in self.classes:
            self.classes.insert(0, self.__class__)

    @observe('config')
    @observe_compat
    def _config_changed(self, change):
        super(Application, self)._config_changed(change)
        self.log.debug('Config changed:')
        self.log.debug(repr(change.new))

    @catch_config_error
    def initialize(self, argv=None):
        """Do the basic steps to configure me.

        Override in subclasses.
        """
        self.parse_command_line(argv)

    def start(self):
        """Start the app mainloop.

        Override in subclasses.
        """
        if self.subapp is not None:
            return self.subapp.start()

    def print_alias_help(self):
        """Print the alias part of the help."""
        if not self.aliases:
            return

        lines = []
        classdict = {}
        for cls in self.classes:
            # include all parents (up to, but excluding Configurable) in available names
            for c in cls.mro()[:-3]:
                classdict[c.__name__] = c

        for alias, longname in self.aliases.items():
            classname, traitname = longname.split('.', 1)
            cls = classdict[classname]

            trait = cls.class_traits(config=True)[traitname]
            help = cls.class_get_trait_help(trait).splitlines()
            # reformat first line
            help[0] = help[0].replace(longname, alias) + ' (%s)' % longname
            if len(alias) == 1:
                help[0] = help[0].replace('--%s=' % alias, '-%s ' % alias)
            lines.extend(help)
        # lines.append('')
        print(os.linesep.join(lines))

    def print_flag_help(self):
        """Print the flag part of the help."""
        if not self.flags:
            return

        lines = []
        for m, (cfg, help) in self.flags.items():
            prefix = '--' if len(m) > 1 else '-'
            lines.append(prefix + m)
            lines.append(indent(dedent(help.strip())))
        # lines.append('')
        print(os.linesep.join(lines))

    def print_options(self):
        if not self.flags and not self.aliases:
            return
        lines = ['Options']
        lines.append('-' * len(lines[0]))
        lines.append('')
        for p in wrap_paragraphs(self.option_description):
            lines.append(p)
            lines.append('')
        print(os.linesep.join(lines))
        self.print_flag_help()
        self.print_alias_help()
        print()

    def print_subcommands(self):
        """Print the subcommand part of the help."""
        if not self.subcommands:
            return

        lines = ["Subcommands"]
        lines.append('-' * len(lines[0]))
        lines.append('')
        for p in wrap_paragraphs(
                self.subcommand_description.format(app=self.name)):
            lines.append(p)
            lines.append('')
        for subc, (cls, help) in self.subcommands.items():
            lines.append(subc)
            if help:
                lines.append(indent(dedent(help.strip())))
        lines.append('')
        print(os.linesep.join(lines))

    def print_help(self, classes=False):
        """Print the help for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        self.print_description()
        self.print_subcommands()
        self.print_options()

        if classes:
            help_classes = self.classes
            if help_classes:
                print("Class parameters")
                print("----------------")
                print()
                for p in wrap_paragraphs(self.keyvalue_description):
                    print(p)
                    print()

            for cls in help_classes:
                cls.class_print_help()
                print()
        else:
            print("To see all available configurables, use `--help-all`")
            print()

        self.print_examples()

    def document_config_options(self):
        """Generate rST format documentation for the config options this application

        Returns a multiline string.
        """
        return '\n'.join(c.class_config_rst_doc()
                         for c in self._classes_inc_parents())

    def print_description(self):
        """Print the application description."""
        for p in wrap_paragraphs(self.description):
            print(p)
            print()

    def print_examples(self):
        """Print usage and examples.

        This usage string goes at the end of the command line help string
        and should contain examples of the application's usage.
        """
        if self.examples:
            print("Examples")
            print("--------")
            print()
            print(indent(dedent(self.examples.strip())))
            print()

    def print_version(self):
        """Print the version string."""
        print(self.version)

    @catch_config_error
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, help = self.subcommands.get(subc)

        if isinstance(subapp, six.string_types):
            subapp = import_item(subapp)

        # clear existing instances
        self.__class__.clear_instance()
        # instantiate
        self.subapp = subapp.instance(config=self.config)
        # and initialize subapp
        self.subapp.initialize(argv)

    def flatten_flags(self):
        """flatten flags and aliases, so cl-args override as expected.
        
        This prevents issues such as an alias pointing to InteractiveShell,
        but a config file setting the same trait in TerminalInteractiveShell
        getting inappropriate priority over the command-line arg.

        Only aliases with exactly one descendant in the class list
        will be promoted.
        
        """
        # build a tree of the classes in our list that inherit from a particular
        # parent: a dict keyed by parent classname, mapping to the classes in our
        # list that are its descendants
        mro_tree = defaultdict(list)
        for cls in self.classes:
            clsname = cls.__name__
            for parent in cls.mro()[1:-3]:
                # exclude cls itself and Configurable,HasTraits,object
                mro_tree[parent.__name__].append(clsname)
        # flatten aliases, which have the form:
        # { 'alias' : 'Class.trait' }
        aliases = {}
        for alias, cls_trait in self.aliases.items():
            cls, trait = cls_trait.split('.', 1)
            children = mro_tree[cls]
            if len(children) == 1:
                # exactly one descendant, promote alias
                cls = children[0]
            aliases[alias] = '.'.join([cls, trait])

        # flatten flags, which are of the form:
        # { 'key' : ({'Cls' : {'trait' : value}}, 'help')}
        flags = {}
        for key, (flagdict, help) in self.flags.items():
            newflag = {}
            for cls, subdict in flagdict.items():
                children = mro_tree[cls]
                # exactly one descendant, promote flag section
                if len(children) == 1:
                    cls = children[0]
                newflag[cls] = subdict
            flags[key] = (newflag, help)
        return flags, aliases

    @catch_config_error
    def parse_command_line(self, argv=None):
        """Parse the command line arguments."""
        argv = sys.argv[1:] if argv is None else argv
        self.argv = [py3compat.cast_unicode(arg) for arg in argv]

        if argv and argv[0] == 'help':
            # turn `ipython help notebook` into `ipython notebook -h`
            argv = argv[1:] + ['-h']

        if self.subcommands and len(argv) > 0:
            # we have subcommands, and one may have been specified
            subc, subargv = argv[0], argv[1:]
            if re.match(r'^\w(\-?\w)*$', subc) and subc in self.subcommands:
                # it's a subcommand, and *not* a flag or class parameter
                return self.initialize_subcommand(subc, subargv)

        # Arguments after a '--' argument are for the script IPython may be
        # about to run, not IPython itself. For arguments parsed here (help and
        # version), we want to only search the arguments up to the first
        # occurrence of '--', which we're calling interpreted_argv.
        try:
            interpreted_argv = argv[:argv.index('--')]
        except ValueError:
            interpreted_argv = argv

        if any(x in interpreted_argv for x in ('-h', '--help-all', '--help')):
            self.print_help('--help-all' in interpreted_argv)
            self.exit(0)

        if '--version' in interpreted_argv or '-V' in interpreted_argv:
            self.print_version()
            self.exit(0)

        # flatten flags&aliases, so cl-args get appropriate priority:
        flags, aliases = self.flatten_flags()
        loader = KVArgParseConfigLoader(argv=argv,
                                        aliases=aliases,
                                        flags=flags,
                                        log=self.log)
        config = loader.load_config()
        self.update_config(config)
        # store unparsed args in extra_args
        self.extra_args = loader.extra_args

    @classmethod
    def _load_config_files(cls,
                           basefilename,
                           path=None,
                           log=None,
                           raise_config_file_errors=False):
        """Load config files (py,json) by filename and path.

        yield each config object in turn.
        """

        if not isinstance(path, list):
            path = [path]
        for path in path[::-1]:
            # path list is in descending priority order, so load files backwards:
            pyloader = cls.python_config_loader_class(basefilename + '.py',
                                                      path=path,
                                                      log=log)
            if log:
                log.debug("Looking for %s in %s", basefilename, path)
            jsonloader = cls.json_config_loader_class(basefilename + '.json',
                                                      path=path,
                                                      log=log)
            config = None
            for loader in [pyloader, jsonloader]:
                try:
                    config = loader.load_config()
                except ConfigFileNotFound:
                    pass
                except Exception:
                    # try to get the full filename, but it will be empty in the
                    # unlikely event that the error raised before filefind finished
                    filename = loader.full_filename or basefilename
                    # problem while running the file
                    if raise_config_file_errors:
                        raise
                    if log:
                        log.error("Exception while loading config file %s",
                                  filename,
                                  exc_info=True)
                else:
                    if log:
                        log.debug("Loaded config file: %s",
                                  loader.full_filename)
                if config:
                    yield config

    @catch_config_error
    def load_config_file(self, filename, path=None):
        """Load config files by filename and path."""
        filename, ext = os.path.splitext(filename)
        loaded = []
        for config in self._load_config_files(
                filename,
                path=path,
                log=self.log,
                raise_config_file_errors=self.raise_config_file_errors,
        ):
            loaded.append(config)
            self.update_config(config)
        if len(loaded) > 1:
            collisions = loaded[0].collisions(loaded[1])
            if collisions:
                self.log.warning(
                    "Collisions detected in {0}.py and {0}.json config files."
                    " {0}.json has higher priority: {1}".format(
                        filename,
                        json.dumps(collisions, indent=2),
                    ))

    def generate_config_file(self):
        """generate default config file from Configurables"""
        lines = ["# Configuration file for %s." % self.name]
        lines.append('')
        for cls in self._classes_inc_parents():
            if not cls.class_own_traits(config=True):
                # skip classes with no config (Singleton, etc.)
                continue
            lines.append(cls.class_config_section())
        return '\n'.join(lines)

    def exit(self, exit_status=0):
        self.log.debug("Exiting application: %s" % self.name)
        sys.exit(exit_status)

    @classmethod
    def launch_instance(cls, argv=None, **kwargs):
        """Launch a global instance of this Application
        
        If a global instance already exists, this reinitializes and starts it
        """
        app = cls.instance(**kwargs)
        app.initialize(argv)
        app.start()
Example #10
class BaseJSONConfigManager(LoggingConfigurable):
    """General JSON config manager

    Deals with persisting/storing config in a json file, with optional
    default values in a {section_name}.d directory.
    """

    config_dir = Unicode(".")
    read_directory = Bool(True)

    def ensure_config_dir_exists(self):
        """Will try to create the config_dir directory."""
        try:
            os.makedirs(self.config_dir, 0o755)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

    def file_name(self, section_name):
        """Returns the json filename for the section_name: {config_dir}/{section_name}.json"""
        return os.path.join(self.config_dir, section_name + ".json")

    def directory(self, section_name):
        """Returns the directory name for the section name: {config_dir}/{section_name}.d"""
        return os.path.join(self.config_dir, section_name + ".d")

    def get(self, section_name, include_root=True):
        """Retrieve the config data for the specified section.

        Returns the data as a dictionary, or an empty dictionary if the file
        doesn't exist.

        When include_root is False, it will not read the root .json file,
        effectively returning the default values.
        """
        paths = [self.file_name(section_name)] if include_root else []
        if self.read_directory:
            pattern = os.path.join(self.directory(section_name), "*.json")
            # These json files should be processed first so that the
            # {section_name}.json file takes precedence.
            # The idea behind this is that installing a Python package may
            # put a json file somewhere in the .d directory, while the
            # .json file is probably a user configuration.
            paths = sorted(glob.glob(pattern)) + paths
        self.log.debug("Paths used for configuration of %s: \n\t%s",
                       section_name, "\n\t".join(paths))
        data = {}
        for path in paths:
            if os.path.isfile(path):
                with io.open(path, encoding="utf-8") as f:
                    recursive_update(data, json.load(f))
        return data

    def set(self, section_name, data):
        """Store the given config data."""
        filename = self.file_name(section_name)
        self.ensure_config_dir_exists()

        if self.read_directory:
            # we will modify data in place, so make a copy
            data = copy.deepcopy(data)
            defaults = self.get(section_name, include_root=False)
            remove_defaults(data, defaults)

        # Generate the JSON up front, since it could raise an exception,
        # in order to avoid writing half-finished corrupted data to disk.
        json_content = json.dumps(data, indent=2)
        if PY3:
            f = io.open(filename, "w", encoding="utf-8")
        else:
            f = open(filename, "wb")
        with f:
            f.write(json_content)

    def update(self, section_name, new_data):
        """Modify the config section by recursively updating it with new_data.

        Returns the modified config data as a dictionary.
        """
        data = self.get(section_name)
        recursive_update(data, new_data)
        self.set(section_name, data)
        return data
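
A minimal usage sketch of the manager above, assuming the process may create and write a ./config directory:

cm = BaseJSONConfigManager(config_dir='config')
cm.set('notebook', {'theme': 'dark'})              # writes config/notebook.json
merged = cm.update('notebook', {'font_size': 14})  # recursive merge, then persist
print(cm.get('notebook'))                          # {'theme': 'dark', 'font_size': 14}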
Example #11
class LinePlotManager(Configurable):
    """
    Manage the line plots for one FigureManager.
    """
    omit_single_point_plot = Bool(True, config=True)

    def __init__(self, fig_manager, dimensions):
        self.update_config(load_config())
        self.fig_manager = fig_manager
        self.start_doc = None
        self.dimensions = dimensions
        self.dim_streams = set(stream for _, stream in self.dimensions)
        if len(self.dim_streams) > 1:
            raise NotImplementedError

    def __call__(self, name, start_doc):
        self.start_doc = start_doc
        return [], [self.subfactory]

    def subfactory(self, name, descriptor_doc):
        if self.omit_single_point_plot and self.start_doc.get('num_points') == 1:
            return []
        if len(self.dimensions) > 1:
            return []  # This is a job for Grid.
        fields = set(hinted_fields(descriptor_doc))
        # Filter out the fields with a data type or shape that we cannot
        # represent in a line plot.
        for field in list(fields):
            dtype = descriptor_doc['data_keys'][field]['dtype']
            if dtype not in ('number', 'integer'):
                fields.discard(field)
            ndim = len(descriptor_doc['data_keys'][field]['shape'] or [])
            if ndim != 0:
                fields.discard(field)

        callbacks = []
        dim_stream, = self.dim_streams  # TODO Handle multiple dim_streams.
        if descriptor_doc.get('name') == dim_stream:
            dimension, = self.dimensions
            x_keys, stream_name = dimension
            fields -= set(x_keys)
            assert stream_name == dim_stream  # TODO Handle multiple dim_streams.
            for x_key in x_keys:
                figure_label = f'Scalars v {x_key}'
                fig = self.fig_manager.get_figure(
                    ('line', x_key, tuple(fields)), figure_label, len(fields), sharex=True)
                for y_key, ax in zip(fields, fig.axes):

                    log.debug('plot %s against %s', y_key, x_key)

                    ylabel = y_key
                    y_units = descriptor_doc['data_keys'][y_key].get('units')
                    if y_units:
                        ylabel += f' [{y_units}]'
                    ax.set_ylabel(ylabel)
                    # Set xlabel only on lowest axes, outside for loop below.

                    def func(event_page, y_key=y_key):
                        """
                        Extract x points and y points to plot out of an EventPage.

                        This will be passed to LineWithPeaks.
                        """
                        y_data = event_page['data'][y_key]
                        if x_key == 'time':
                            t0 = self.start_doc['time']
                            x_data = numpy.asarray(event_page['time']) - t0
                        elif x_key == 'seq_num':
                            x_data = event_page['seq_num']
                        else:
                            x_data = event_page['data'][x_key]
                        return x_data, y_data

                    line = Line(func, ax=ax)
                    callbacks.append(line)

                if fields:
                    # Set the xlabel on the bottom-most axis.
                    if x_key == 'time':
                        xlabel = x_key
                        x_units = 's'
                    elif x_key == 'seq_num':
                        xlabel = 'sequence number'
                        x_units = None
                    else:
                        xlabel = x_key
                        x_units = descriptor_doc['data_keys'][x_key].get('units')
                    if x_units:
                        xlabel += f' [{x_units}]'
                    ax.set_xlabel(xlabel)
                    fig.tight_layout()
            # TODO Plot other streams against time.
        for callback in callbacks:
            callback('start', self.start_doc)
            callback('descriptor', descriptor_doc)
        return callbacks
Example #12
class FirstUseAuthenticator(Authenticator):
    """
    JupyterHub authenticator that lets users set password on first use.
    """
    dbm_path = Unicode('passwords.dbm',
                       config=True,
                       help="""
        Path to store the db file with username / pwd hash in
        """)

    create_users = Bool(True,
                        config=True,
                        help="""
        Create users if they do not exist already.

        When set to false, users would have to be explicitly created before
        they can log in. Users can be created via the admin panel or by setting
        whitelist / admin list.
        """)

    min_password_length = Integer(7,
                                  config=True,
                                  help="""
        The minimum length of the password when user is created.
        When set to 0, users will be allowed to set 0 length passwords.
        """)

    def _user_exists(self, username):
        """
        Return true if given user already exists.

        Note: Depends on internal details of JupyterHub that might change
        across versions. Tested with v0.9
        """
        return self.db.query(User).filter_by(name=username).first() is not None

    def _validate_password(self, password):
        return len(password) >= self.min_password_length

    def validate_username(self, name):
        invalid_chars = [',', ' ']
        if any((char in name) for char in invalid_chars):
            return False
        return super().validate_username(name)

    @gen.coroutine
    def authenticate(self, handler, data):
        username = data['username']

        if not self.create_users:
            if not self._user_exists(username):
                return None

        password = data['password']
        # Don't enforce password length requirement on existing users, since that can
        # lock users out of their hubs.

        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            stored_pw = db.get(username.encode(), None)
            if stored_pw is not None:
                if bcrypt.hashpw(password.encode(), stored_pw) != stored_pw:
                    return None
            else:
                if not self._validate_password(password):
                    handler.custom_login_error = (
                        'Password too short! Please choose a password at least %d characters long.'
                        % self.min_password_length)
                    self.log.error(handler.custom_login_error)
                    return None
                db[username] = bcrypt.hashpw(password.encode(),
                                             bcrypt.gensalt())
        return username

    def delete_user(self, user):
        """
        When user is deleted, remove their entry from password db.

        This lets passwords be reset by deleting users.
        """
        try:
            with dbm.open(self.dbm_path, 'c', 0o600) as db:
                del db[user.name]
        except KeyError:
            pass

    def reset_password(self, username, new_password):
        """
        This allows changing the password of a logged-in user.
        """
        if not self._validate_password(new_password):
            login_err = (
                'Password too short! Please choose a password at least %d characters long.'
                % self.min_password_length)
            self.log.error(login_err)
            # Resetting the password will fail if the new password is too short.
            return login_err
        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            db[username] = bcrypt.hashpw(new_password.encode(),
                                         bcrypt.gensalt())
        login_msg = "Your password has been changed successfully!"
        self.log.info(login_msg)
        return login_msg

    def get_handlers(self, app):
        return [(r'/login', CustomLoginHandler),
                (r'/auth/change-password', ResetPasswordHandler)]
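
A jupyterhub_config.py sketch wiring up the authenticator above; the paths and values are illustrative assumptions, not recommendations:

c = get_config()  # provided when JupyterHub loads the config file
c.JupyterHub.authenticator_class = FirstUseAuthenticator
c.FirstUseAuthenticator.dbm_path = '/srv/jupyterhub/passwords.dbm'
c.FirstUseAuthenticator.min_password_length = 10
c.FirstUseAuthenticator.create_users = False  # require accounts to be created explicitly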
Example #13
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, ConnectionFileMixin):
    name = "ipython-kernel"
    aliases = Dict(kernel_aliases)
    flags = Dict(kernel_flags)
    classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
    # the kernel class, as an importstring
    kernel_class = Type(
        "ipykernel.ipkernel.IPythonKernel",
        klass="ipykernel.kernelbase.Kernel",
        help="""The Kernel subclass to be used.

    This should allow easy re-use of the IPKernelApp entry point
    to configure and launch kernels other than IPython's own.
    """,
    ).tag(config=True)
    kernel = Any()
    poller = Any()  # don't restrict this even though current pollers are all Threads
    heartbeat = Instance(Heartbeat, allow_none=True)

    context = Any()
    shell_socket = Any()
    control_socket = Any()
    debugpy_socket = Any()
    debug_shell_socket = Any()
    stdin_socket = Any()
    iopub_socket = Any()
    iopub_thread = Any()
    control_thread = Any()

    _ports = Dict()

    subcommands = {
        "install": (
            "ipykernel.kernelspec.InstallIPythonKernelSpecApp",
            "Install the IPython kernel",
        ),
    }

    # connection info:
    connection_dir = Unicode()

    @default("connection_dir")
    def _default_connection_dir(self):
        return jupyter_runtime_dir()

    @property
    def abs_connection_file(self):
        if os.path.basename(self.connection_file) == self.connection_file:
            return os.path.join(self.connection_dir, self.connection_file)
        else:
            return self.connection_file

    # streams, etc.
    no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True)
    no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True)
    trio_loop = Bool(False, help="Set main event loop.").tag(config=True)
    quiet = Bool(True, help="Only send stdout/stderr to output stream").tag(config=True)
    outstream_class = DottedObjectName(
        "ipykernel.iostream.OutStream", help="The importstring for the OutStream factory"
    ).tag(config=True)
    displayhook_class = DottedObjectName(
        "ipykernel.displayhook.ZMQDisplayHook", help="The importstring for the DisplayHook factory"
    ).tag(config=True)

    capture_fd_output = Bool(
        True,
        help="""Attempt to capture and forward low-level output, e.g. produced by Extension libraries.
    """,
    ).tag(config=True)

    # polling
    parent_handle = Integer(
        int(os.environ.get("JPY_PARENT_PID") or 0),
        help="""kill this process if its parent dies.  On Windows, the argument
        specifies the HANDLE of the parent process, otherwise it is simply boolean.
        """,
    ).tag(config=True)
    interrupt = Integer(
        int(os.environ.get("JPY_INTERRUPT_EVENT") or 0),
        help="""ONLY USED ON WINDOWS
        Interrupt this process when the parent is signaled.
        """,
    ).tag(config=True)

    def init_crash_handler(self):
        sys.excepthook = self.excepthook

    def excepthook(self, etype, evalue, tb):
        # write uncaught traceback to 'real' stderr, not zmq-forwarder
        traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)

    def init_poller(self):
        if sys.platform == "win32":
            if self.interrupt or self.parent_handle:
                self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
        elif self.parent_handle and self.parent_handle != 1:
            # PID 1 (init) is special and will never go away,
            # only be reassigned.
            # Parent polling doesn't work if ppid == 1 to start with.
            self.poller = ParentPollerUnix()

    def _try_bind_socket(self, s, port):
        iface = "%s://%s" % (self.transport, self.ip)
        if self.transport == "tcp":
            if port <= 0:
                port = s.bind_to_random_port(iface)
            else:
                s.bind("tcp://%s:%i" % (self.ip, port))
        elif self.transport == "ipc":
            if port <= 0:
                port = 1
                path = "%s-%i" % (self.ip, port)
                while os.path.exists(path):
                    port = port + 1
                    path = "%s-%i" % (self.ip, port)
            else:
                path = "%s-%i" % (self.ip, port)
            s.bind("ipc://%s" % path)
        return port

    def _bind_socket(self, s, port):
        try:
            win_in_use = errno.WSAEADDRINUSE  # type:ignore[attr-defined]
        except AttributeError:
            win_in_use = None

        # Try up to 100 times to bind a port when in conflict to avoid
        # infinite attempts in bad setups
        max_attempts = 1 if port else 100
        for attempt in range(max_attempts):
            try:
                return self._try_bind_socket(s, port)
            except zmq.ZMQError as ze:
                # Raise if we have any error not related to socket binding
                if ze.errno != errno.EADDRINUSE and ze.errno != win_in_use:
                    raise
                if attempt == max_attempts - 1:
                    raise

    def write_connection_file(self):
        """write connection info to JSON file"""
        cf = self.abs_connection_file
        self.log.debug("Writing connection file: %s", cf)
        write_connection_file(
            cf,
            ip=self.ip,
            key=self.session.key,
            transport=self.transport,
            shell_port=self.shell_port,
            stdin_port=self.stdin_port,
            hb_port=self.hb_port,
            iopub_port=self.iopub_port,
            control_port=self.control_port,
        )

    def cleanup_connection_file(self):
        cf = self.abs_connection_file
        self.log.debug("Cleaning up connection file: %s", cf)
        try:
            os.remove(cf)
        except OSError:
            pass

        self.cleanup_ipc_files()

    def init_connection_file(self):
        if not self.connection_file:
            self.connection_file = "kernel-%s.json" % os.getpid()
        try:
            self.connection_file = filefind(self.connection_file, [".", self.connection_dir])
        except OSError:
            self.log.debug("Connection file not found: %s", self.connection_file)
            # This means I own it, and I'll create it in this directory:
            os.makedirs(os.path.dirname(self.abs_connection_file), mode=0o700, exist_ok=True)
            # Also, I will clean it up:
            atexit.register(self.cleanup_connection_file)
            return
        try:
            self.load_connection_file()
        except Exception:
            self.log.error(
                "Failed to load connection file: %r", self.connection_file, exc_info=True
            )
            self.exit(1)

    def init_sockets(self):
        # Create a context, a session, and the kernel sockets.
        self.log.info("Starting the kernel at pid: %i", os.getpid())
        assert self.context is None, "init_sockets cannot be called twice!"
        self.context = context = zmq.Context()
        atexit.register(self.close)

        self.shell_socket = context.socket(zmq.ROUTER)
        self.shell_socket.linger = 1000
        self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
        self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)

        self.stdin_socket = context.socket(zmq.ROUTER)
        self.stdin_socket.linger = 1000
        self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
        self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)

        if hasattr(zmq, "ROUTER_HANDOVER"):
            # set router-handover to workaround zeromq reconnect problems
            # in certain rare circumstances
            # see ipython/ipykernel#270 and zeromq/libzmq#2892
            self.shell_socket.router_handover = self.stdin_socket.router_handover = 1

        self.init_control(context)
        self.init_iopub(context)

    def init_control(self, context):
        self.control_socket = context.socket(zmq.ROUTER)
        self.control_socket.linger = 1000
        self.control_port = self._bind_socket(self.control_socket, self.control_port)
        self.log.debug("control ROUTER Channel on port: %i" % self.control_port)

        self.debugpy_socket = context.socket(zmq.STREAM)
        self.debugpy_socket.linger = 1000

        self.debug_shell_socket = context.socket(zmq.DEALER)
        self.debug_shell_socket.linger = 1000
        if self.shell_socket.getsockopt(zmq.LAST_ENDPOINT):
            self.debug_shell_socket.connect(self.shell_socket.getsockopt(zmq.LAST_ENDPOINT))

        if hasattr(zmq, "ROUTER_HANDOVER"):
            # set router-handover to workaround zeromq reconnect problems
            # in certain rare circumstances
            # see ipython/ipykernel#270 and zeromq/libzmq#2892
            self.control_socket.router_handover = 1

        self.control_thread = ControlThread(daemon=True)

    def init_iopub(self, context):
        self.iopub_socket = context.socket(zmq.PUB)
        self.iopub_socket.linger = 1000
        self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
        self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
        self.configure_tornado_logger()
        self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True)
        self.iopub_thread.start()
        # backward-compat: wrap iopub socket API in background thread
        self.iopub_socket = self.iopub_thread.background_socket

    def init_heartbeat(self):
        """start the heart beating"""
        # heartbeat doesn't share context, because it mustn't be blocked
        # by the GIL, which is accessed by libzmq when freeing zero-copy messages
        hb_ctx = zmq.Context()
        self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
        self.hb_port = self.heartbeat.port
        self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
        self.heartbeat.start()

    def close(self):
        """Close zmq sockets in an orderly fashion"""
        # un-capture IO before we start closing channels
        self.reset_io()
        self.log.info("Cleaning up sockets")
        if self.heartbeat:
            self.log.debug("Closing heartbeat channel")
            self.heartbeat.context.term()
        if self.iopub_thread:
            self.log.debug("Closing iopub channel")
            self.iopub_thread.stop()
            self.iopub_thread.close()
        if self.control_thread and self.control_thread.is_alive():
            self.log.debug("Closing control thread")
            self.control_thread.stop()
            self.control_thread.join()

        if self.debugpy_socket and not self.debugpy_socket.closed:
            self.debugpy_socket.close()
        if self.debug_shell_socket and not self.debug_shell_socket.closed:
            self.debug_shell_socket.close()

        for channel in ("shell", "control", "stdin"):
            self.log.debug("Closing %s channel", channel)
            socket = getattr(self, channel + "_socket", None)
            if socket and not socket.closed:
                socket.close()
        self.log.debug("Terminating zmq context")
        self.context.term()
        self.log.debug("Terminated zmq context")

    def log_connection_info(self):
        """display connection info, and store ports"""
        basename = os.path.basename(self.connection_file)
        if (
            basename == self.connection_file
            or os.path.dirname(self.connection_file) == self.connection_dir
        ):
            # use shortname
            tail = basename
        else:
            tail = self.connection_file
        lines = [
            "To connect another client to this kernel, use:",
            "    --existing %s" % tail,
        ]
        # log connection info
        # info-level, so often not shown.
        # frontends should use the %connect_info magic
        # to see the connection info
        for line in lines:
            self.log.info(line)
        # also raw print to the terminal if no parent_handle (`ipython kernel`)
        # unless log-level is CRITICAL (--quiet)
        if not self.parent_handle and self.log_level < logging.CRITICAL:
            print(_ctrl_c_message, file=sys.__stdout__)
            for line in lines:
                print(line, file=sys.__stdout__)

        self._ports = dict(
            shell=self.shell_port,
            iopub=self.iopub_port,
            stdin=self.stdin_port,
            hb=self.hb_port,
            control=self.control_port,
        )

    def init_blackhole(self):
        """redirects stdout/stderr to devnull if necessary"""
        if self.no_stdout or self.no_stderr:
            blackhole = open(os.devnull, "w")
            if self.no_stdout:
                sys.stdout = sys.__stdout__ = blackhole
            if self.no_stderr:
                sys.stderr = sys.__stderr__ = blackhole

    def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            outstream_factory = import_item(str(self.outstream_class))
            if sys.stdout is not None:
                sys.stdout.flush()

            e_stdout = None if self.quiet else sys.__stdout__
            e_stderr = None if self.quiet else sys.__stderr__

            if not self.capture_fd_output:
                outstream_factory = partial(outstream_factory, watchfd=False)

            sys.stdout = outstream_factory(self.session, self.iopub_thread, "stdout", echo=e_stdout)
            if sys.stderr is not None:
                sys.stderr.flush()
            sys.stderr = outstream_factory(self.session, self.iopub_thread, "stderr", echo=e_stderr)
            if hasattr(sys.stderr, "_original_stdstream_copy"):

                for handler in self.log.handlers:
                    if isinstance(handler, StreamHandler) and (handler.stream.buffer.fileno() == 2):
                        self.log.debug("Seeing logger to stderr, rerouting to raw filedescriptor.")

                        handler.stream = TextIOWrapper(
                            FileIO(
                                sys.stderr._original_stdstream_copy,  # type:ignore[attr-defined]
                                "w",
                            )
                        )
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            self.displayhook = displayhook_factory(self.session, self.iopub_socket)
            sys.displayhook = self.displayhook

        self.patch_io()

    def reset_io(self):
        """restore original io

        restores state after init_io
        """
        sys.stdout = sys.__stdout__
        sys.stderr = sys.__stderr__
        sys.displayhook = sys.__displayhook__

    def patch_io(self):
        """Patch important libraries that can't handle sys.stdout forwarding"""
        try:
            import faulthandler
        except ImportError:
            pass
        else:
            # Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible
            # updates to the upstream API and update accordingly (up-to-date as of Python 3.5):
            # https://docs.python.org/3/library/faulthandler.html#faulthandler.enable

            # change default file to __stderr__ from forwarded stderr
            faulthandler_enable = faulthandler.enable

            def enable(file=sys.__stderr__, all_threads=True, **kwargs):
                return faulthandler_enable(file=file, all_threads=all_threads, **kwargs)

            faulthandler.enable = enable

            if hasattr(faulthandler, "register"):
                faulthandler_register = faulthandler.register

                def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs):
                    return faulthandler_register(
                        signum, file=file, all_threads=all_threads, chain=chain, **kwargs
                    )

                faulthandler.register = register

    def init_signal(self):
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def init_kernel(self):
        """Create the Kernel object itself"""
        shell_stream = ZMQStream(self.shell_socket)
        control_stream = ZMQStream(self.control_socket, self.control_thread.io_loop)
        debugpy_stream = ZMQStream(self.debugpy_socket, self.control_thread.io_loop)
        self.control_thread.start()
        kernel_factory = self.kernel_class.instance

        kernel = kernel_factory(
            parent=self,
            session=self.session,
            control_stream=control_stream,
            debugpy_stream=debugpy_stream,
            debug_shell_socket=self.debug_shell_socket,
            shell_stream=shell_stream,
            control_thread=self.control_thread,
            iopub_thread=self.iopub_thread,
            iopub_socket=self.iopub_socket,
            stdin_socket=self.stdin_socket,
            log=self.log,
            profile_dir=self.profile_dir,
            user_ns=self.user_ns,
        )
        kernel.record_ports({name + "_port": port for name, port in self._ports.items()})
        self.kernel = kernel

        # Allow the displayhook to get the execution count
        self.displayhook.get_execution_count = lambda: kernel.execution_count

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""

        # Register inline backend as default
        # this is higher priority than matplotlibrc,
        # but lower priority than anything else (mpl.use() for instance).
        # This only affects matplotlib >= 1.5
        if not os.environ.get("MPLBACKEND"):
            os.environ["MPLBACKEND"] = "module://matplotlib_inline.backend_inline"

        # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
        # to ensure that any exception is printed straight to stderr.
        # Normally _showtraceback associates the reply with an execution,
        # which means frontends will never draw it, as this exception
        # is not associated with any execute request.

        shell = self.shell
        assert shell is not None
        _showtraceback = shell._showtraceback
        try:
            # replace error-sending traceback with stderr
            def print_tb(etype, evalue, stb):
                print("GUI event loop or pylab initialization failed", file=sys.stderr)
                assert shell is not None
                print(shell.InteractiveTB.stb2text(stb), file=sys.stderr)

            shell._showtraceback = print_tb
            InteractiveShellApp.init_gui_pylab(self)
        finally:
            shell._showtraceback = _showtraceback

    def init_shell(self):
        self.shell = getattr(self.kernel, "shell", None)
        if self.shell:
            self.shell.configurables.append(self)

    def configure_tornado_logger(self):
        """Configure the tornado logging.Logger.

        Must set up the tornado logger or else tornado will call
        basicConfig for the root logger which makes the root logger
        go to the real sys.stderr instead of the capture streams.
        This function mimics the setup of logging.basicConfig.
        """
        logger = logging.getLogger("tornado")
        handler = logging.StreamHandler()
        formatter = logging.Formatter(logging.BASIC_FORMAT)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    def _init_asyncio_patch(self):
        """set default asyncio policy to be compatible with tornado

        Tornado 6 (at least) is not compatible with the default
        asyncio implementation on Windows

        Pick the older SelectorEventLoopPolicy on Windows
        if the known-incompatible default policy is in use.

        Support for Proactor via a background thread is available in tornado 6.1,
        but it is still preferable to run the Selector in the main thread
        instead of the background.

        do this as early as possible to make it a low priority and overrideable

        ref: https://github.com/tornadoweb/tornado/issues/2608

        FIXME: if/when tornado supports the defaults in asyncio without threads,
               remove and bump tornado requirement for py38.
               Most likely, this will mean a new Python version
               where asyncio.ProactorEventLoop supports add_reader and friends.

        """
        if sys.platform.startswith("win") and sys.version_info >= (3, 8):
            import asyncio

            try:
                from asyncio import (
                    WindowsProactorEventLoopPolicy,
                    WindowsSelectorEventLoopPolicy,
                )
            except ImportError:
                pass
                # not affected
            else:
                if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy:
                    # WindowsProactorEventLoopPolicy is not compatible with tornado 6
                    # fallback to the pre-3.8 default of Selector
                    asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())

    def init_pdb(self):
        """Replace pdb with IPython's version that is interruptible.

        With the non-interruptible version, stopping pdb() locks up the kernel in a
        non-recoverable state.
        """
        import pdb

        from IPython.core import debugger

        if hasattr(debugger, "InterruptiblePdb"):
            # Only available in newer IPython releases:
            debugger.Pdb = debugger.InterruptiblePdb
            pdb.Pdb = debugger.Pdb  # type:ignore[misc]
            pdb.set_trace = debugger.set_trace

    @catch_config_error
    def initialize(self, argv=None):
        self._init_asyncio_patch()
        super().initialize(argv)
        if self.subapp is not None:
            return

        self.init_pdb()
        self.init_blackhole()
        self.init_connection_file()
        self.init_poller()
        self.init_sockets()
        self.init_heartbeat()
        # writing/displaying connection info must be *after* init_sockets/heartbeat
        self.write_connection_file()
        # Log connection info after writing connection file, so that the connection
        # file is definitely available at the time someone reads the log.
        self.log_connection_info()
        self.init_io()
        try:
            self.init_signal()
        except Exception:
            # Catch the exception when signal initialization fails, e.g. when running the
            # kernel on a separate thread
            if self.log_level < logging.CRITICAL:
                self.log.error("Unable to initialize signal:", exc_info=True)
        self.init_kernel()
        # shell init steps
        self.init_path()
        self.init_shell()
        if self.shell:
            self.init_gui_pylab()
            self.init_extensions()
            self.init_code()
        # flush stdout/stderr, so that anything written to these streams during
        # initialization does not get associated with the first execution request
        sys.stdout.flush()
        sys.stderr.flush()

    def start(self):
        if self.subapp is not None:
            return self.subapp.start()
        if self.poller is not None:
            self.poller.start()
        self.kernel.start()
        self.io_loop = ioloop.IOLoop.current()
        if self.trio_loop:
            from ipykernel.trio_runner import TrioRunner

            tr = TrioRunner()
            tr.initialize(self.kernel, self.io_loop)
            try:
                tr.run()
            except KeyboardInterrupt:
                pass
        else:
            try:
                self.io_loop.start()
            except KeyboardInterrupt:
                pass
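The initialize()/start() sequence above is normally driven from the kernel's entry point; a minimal, hedged launch sketch (assuming ipykernel's usual module layout) is:

from ipykernel.kernelapp import IPKernelApp

IPKernelApp.launch_instance()  # launch_instance() calls initialize() and then start()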
Example #14
0
class TestApp(Application):
    value = Unicode().tag(config=True)
    config_file_loaded = Bool().tag(config=True)
    aliases = {'v': 'TestApp.value'}
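A minimal usage sketch of the alias above; the argument value 'hello' is purely illustrative. Application.initialize() parses the command line, so the '-v' alias ends up assigned to TestApp.value:

app = TestApp()
app.initialize(['-v', 'hello'])  # 'v' is aliased to TestApp.value
print(app.value)                 # expected output: hello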
Example #15
0
class FileMetadataCache(SingletonConfigurable):
    """FileMetadataCache is used exclusively by FileMetadataStore to cache file-based metadata instances.

    FileMetadataCache utilizes a watchdog handler to monitor directories corresponding to
    any files it contains.  The handler is primarily used to determine which cached entries
    to remove (on delete operations).

    The cache is implemented as a simple LRU cache using an OrderedDict.
    """

    max_size = Integer(
        min=1,
        max=1024,
        default_value=128,
        config=True,
        help="The maximum number of entries allowed in the cache.")

    enabled = Bool(default_value=True,
                   config=True,
                   help="Caching is enabled (True) or disabled (False).")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.hits: int = 0
        self.misses: int = 0
        self.trims: int = 0
        self._entries: OrderedDict = OrderedDict()
        if self.enabled:  # Only create and start an observer when enabled
            self.observed_dirs = set(
            )  # Tracks which directories are being watched
            self.observer = Observer()
            self.observer.start()
        else:
            self.log.info(
                "The file metadata cache is currently disabled via configuration. "
                "Set FileMetadataCache.enabled=True to enable instance caching."
            )

    def __len__(self) -> int:
        """Return the number of running kernels."""
        return len(self._entries)

    def __contains__(self, path: str) -> bool:
        return path in self._entries

    @caching_enabled
    def add_item(self, path: str, entry: Dict[str, Any]) -> None:
        """Adds the named entry and its entry to the cache.

        If this causes the cache to grow beyond its max size, the least recently
        used entry is removed.
        """
        md_dir: str = os.path.dirname(path)
        if md_dir not in self.observed_dirs and os.path.isdir(md_dir):
            self.observer.schedule(FileChangeHandler(self),
                                   md_dir,
                                   recursive=True)
            self.observed_dirs.add(md_dir)
        self._entries[path] = copy.deepcopy(entry)
        self._entries.move_to_end(path)
        if len(self._entries) > self.max_size:
            self.trims += 1
            self._entries.popitem(last=False)  # pop LRU entry

    @caching_enabled
    def get_item(self, path: str) -> Optional[Dict[str, Any]]:
        """Gets the named entry and returns its value or None if not present."""
        if path in self._entries:
            self.hits += 1
            self._entries.move_to_end(path)
            return copy.deepcopy(self._entries[path])

        self.misses += 1
        return None

    @caching_enabled
    def remove_item(self, path: str) -> Optional[Dict[str, Any]]:
        """Removes the named entry and returns its value or None if not present."""
        if path in self._entries:
            return self._entries.pop(path)

        return None
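A short sketch of the LRU behaviour described in the class docstring above; it assumes the imports used by FileMetadataCache (watchdog's Observer, FileChangeHandler, the caching_enabled decorator) are available, and the paths and metadata are hypothetical:

cache = FileMetadataCache(max_size=2)
cache.add_item("/tmp/md/a.json", {"name": "a"})
cache.add_item("/tmp/md/b.json", {"name": "b"})
cache.get_item("/tmp/md/a.json")                  # hit: 'a' becomes most recently used
cache.add_item("/tmp/md/c.json", {"name": "c"})   # exceeds max_size, so 'b' (the LRU entry) is trimmed
assert "/tmp/md/b.json" not in cache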
Example #16
0
class SSHHydraKernelProvisioner(FileManagementMixin, HydraKernelProvisioner):
    host = Unicode()
    user = Unicode()
    private_key_file = Unicode(allow_none=True)
    timeout = Int(DEFAULT_SSH_TIMEOUT)
    sudo = Bool(False)
    host_key_checking = Bool(
        False,
        help=
        ("If set, remote connections to hosts that do not have an entry in the "
         "system host key list will raise an error."),
    )

    connection = Instance("hydra_kernel.provisioning.ssh.SSHConnection",
                          allow_none=True)
    pid = Int(allow_none=True)

    _kernelspecs: "Dict" = None
    _subkernel_connection: "KernelConnectionInfo" = None
    _tunnels: "Dict[str, Tuple[str, int]]" = {}
    _tunnel_ctl_path: "str" = None

    @property
    def has_process(self) -> bool:
        if self.connection is None:
            return False
        if self.pid is None:
            return False
        return True

    def reset(self) -> None:
        self.connection = None
        self.pid = None
        self._kernelspecs = None
        self._tunnels = {}
        self._tunnel_ctl_path = None

    async def _save_host_key(self):
        hosts_file_path = pathlib.Path(pathlib.Path.home(), ".ssh",
                                       "known_hosts")
        hosts_file_path.parent.mkdir(exist_ok=True)
        hosts_file_path.touch()
        with hosts_file_path.open("r+") as hosts_file:
            start = f"# BEGIN hydra_kernel: {self.host}"
            end = f"# END hydra_kernel: {self.host}"
            lines = hosts_file.readlines()
            start_i, end_i = 0, 0
            for i, line in enumerate(lines):
                # readlines() keeps trailing newlines, so compare stripped lines
                if line.strip() == start:
                    start_i = i
                elif line.strip() == end:
                    end_i = i
                    break
            # Splice out block
            lines = lines[:start_i] + lines[end_i:]

            proc = await asyncio.create_subprocess_exec(
                "ssh-keyscan",
                "-H",
                shlex.quote(self.host),
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
            )
            stdout, stderr = await proc.communicate()
            if proc.returncode != 0:
                LOG.warning(
                    f"Failed to update host key for {self.host}: {stderr.decode()}"
                )

            lines.append(start)
            lines.append(stdout.decode("utf-8"))
            lines.append(end)
            hosts_file.seek(0)
            hosts_file.write("\n".join(lines))
            hosts_file.truncate()  # drop any stale content left over from the old file

    async def pre_launch(self, **kwargs: "Any") -> "Dict[str, Any]":
        kwargs = await super().pre_launch(**kwargs)

        self.connection = SSHConnection(parent=self)

        # Check if desired kernel exists on remote
        self.binding.update_progress("Checking host kernels")
        if not await self.has_hydra_kernelspec(self.subkernel_name):
            self.binding.update_progress(
                f"Installing {self.subkernel_name} kernel")
            await self.provision_hydra_kernelspec(self.subkernel_name)

        kwargs["cmd"] = [
            "hydra-agent",
            f"--kernel={self.subkernel_name}",
            f"--id={self.kernel_id}",
            f"--launcher=hydra-subkernel",
        ]

        return kwargs

    async def launch_kernel(self, command, **kwargs):
        self.binding.update_progress("Establishing secure connection")

        LOG.debug(f"{self.binding.name}: kernel_cmd={command}")
        subkernel = self.connection.exec_json(command, login=True)
        self._subkernel_connection = subkernel["connection"]
        LOG.debug(
            f"{self.binding.name}: connection={self._subkernel_connection}")

        conn_info = self._subkernel_connection.copy()

        if not self.host_key_checking:
            await self._save_host_key()

        for port_name in port_names:
            conn_info[port_name] = await self._tunnel_to_port(port_name)

        self.pid = int(subkernel["pid"])

        return conn_info

    async def send_signal(self, signum):
        try:
            self.connection.exec(f"kill -{signum} {self.pid}")
        except BindingConnectionError as exc:
            LOG.error(f"Failed to send signal: {exc}")

    async def poll(self) -> "Optional[int]":
        try:
            # TODO: also check status of tunnels here
            self.connection.exec(f"kill -0 {self.pid}")
        except OSError:
            return -1

    async def cleanup(self, restart: bool = False) -> None:
        for port_name, tunnel in self._tunnels.items():
            try:
                LOG.debug(
                    f"Killing {port_name} SSH tunnel (pid={tunnel['pid']})")
                os.kill(tunnel["pid"])
            except OSError:
                pass
        self._tunnels = {}

    async def has_hydra_kernelspec(self, kernel_name):
        try:
            ret, _, _ = self.connection.exec("which hydra-subkernel")
            if ret != 0:
                return False
        except RuntimeError as exc:
            LOG.error(
                f"Failed to check for hydra binaries on {self.binding_name}: {exc}"
            )
            return False

        if not self._kernelspecs:
            LOG.info(f"Fetching all kernel specs for '{self.binding.name}'")
            try:
                self._kernelspecs = self.connection.exec_json(
                    "jupyter kernelspec list --json --log-level ERROR",
                    login=True)["kernelspecs"]
            except RuntimeError as exc:
                LOG.warning(
                    f"Failed to list kernel specs on {self.binding.name}: {exc}"
                )
                return False

        for spec_name, spec_info in self._kernelspecs.items():
            lang = spec_info["spec"]["language"]
            if spec_name == kernel_name or lang == kernel_name:
                return True

        return False

    async def provision_hydra_kernelspec(self, kernel_name):
        ansible_dir = os.path.join(sys.prefix, "share", "hydra-kernel",
                                   "ansible")
        host_vars = {
            "ansible_host": self.host,
            "ansible_user": self.user,
            "ansible_become": self.sudo,
            "ansible_ssh_private_key_file":
            _expand_path(self.private_key_file),
            # TODO: handle "via"
        }

        with tempfile.TemporaryDirectory() as tmpdir:
            with redirect_stdout(io.StringIO()):
                runner = ansible_runner.run(
                    private_data_dir=tmpdir,
                    project_dir=ansible_dir,
                    inventory={"all": {
                        "hosts": {
                            "KERNEL": host_vars
                        }
                    }},
                    playbook="kernel_action.yml",
                    extravars={
                        "kernel_name": kernel_name,
                        "kernel_action": "install",
                    },
                    event_handler=self._on_ansible_event,
                    # Don't output to stdout, store as JSON instead
                    quiet=True,
                    json_mode=True,
                )
                LOG.debug(runner.stdout.read())
                if runner.status != "successful" or runner.errored:
                    raise RuntimeError(
                        f"Failed to install kernel {kernel_name}")

        # Invalidate kernelspecs as we have installed a new one
        self._kernelspecs = None

    async def upload_path(self, local_path: "str", remote_path: "str" = None):
        req_id = uuid.uuid4()
        tmp_archive = f"/tmp/{req_id}.tar.gz"

        self.binding.update_progress("Preparing upload")
        fd = self.prepare_upload(local_path)

        def _on_progress(filename, size, sent):
            self.binding.update_progress(
                f"Uploading ({math.floor((sent/size) * 100)}%)")

        self.connection.put_file(fd, tmp_archive, on_progress=_on_progress)

        self.binding.update_progress("Finishing")
        self.connection.exec(["tar", "xzf", tmp_archive, "-C", remote_path])
        self.connection.exec(["rm", "-f", tmp_archive])

    async def download_path(self,
                            remote_path: "str",
                            local_path: "str" = None):
        req_id = uuid.uuid4()
        tmp_archive = f"/tmp/{req_id}.tar.gz"

        self.binding.update_progress("Preparing download")
        self.connection.exec(
            ["tar", "czf", tmp_archive, "-C", remote_path, "."])

        def _on_progress(filename, size, sent):
            self.binding.update_progress(
                f"Downloading ({math.floor((sent/size) * 100)}%)")

        with self.connection.get_file(tmp_archive,
                                      on_progress=_on_progress) as archive_fd:
            self.binding.update_progress("Finishing")
            with tarfile.open(fileobj=archive_fd, mode="r") as tar:
                tar.extractall(local_path)
        self.connection.exec(["rm", "-f", tmp_archive])

    async def _tunnel_to_port(self,
                              port_name: "str",
                              lport: "int" = None) -> "int":
        stream = io.StringIO()
        error = None

        with redirect_stdout(stream):
            try:
                subkernel_conn = self._subkernel_connection
                self.binding.update_progress(
                    f"Starting kernel {port_name} tunnel")
                if not await self._is_tunnel_up():
                    await self._start_tunnel()
                if not lport:
                    lport = select_random_ports(1)[0]
                await self._forward_over_tunnel(lport,
                                                subkernel_conn[port_name])
                self._tunnels[port_name] = lport
            except (RuntimeError, TypeError) as exc:
                error = exc

        if error:
            stream.seek(0)
            LOG.error(f"error={error}, stdout={stream.read()}")
            raise RuntimeError(
                f"Failed to establish tunnel for {port_name}: {error}")

        return self._tunnels[port_name]

    @property
    def _ssh_host(self):
        return f"{self.user}@{self.host}"

    @property
    def _ssh_cmd(self):
        cmd = ["ssh"]
        if self.private_key_file:
            cmd.extend(["-i", _expand_path(self.private_key_file)])
        return cmd

    async def _start_tunnel(self):
        self._tunnel_ctl_path = pathlib.Path(
            tempfile.gettempdir(),
            f"{self.user}-{self.host.replace('.', '-')}.tunnel")
        if self._tunnel_ctl_path.exists():
            self._tunnel_ctl_path.unlink()

        cmd = self._ssh_cmd
        cmd.extend([
            "-fN",  # -f = background process, -N = don't run a command
            "-o",
            "ControlMaster=yes",
            "-o",
            f"ControlPath={self._tunnel_ctl_path}",
            "-o",
            "ServerAliveInterval=5",
            self._ssh_host,
        ])
        tunnel_proc = await asyncio.create_subprocess_exec(*cmd)
        _, stderr = await tunnel_proc.communicate()
        if tunnel_proc.returncode != 0:
            self._tunnel_ctl_path = None
            raise RuntimeError(
                f"Failed to establish SSH tunnel to {self.host}")

    async def _forward_over_tunnel(self, lport, rport):
        LOG.debug(f"_forward_over_tunnel: forwarding {lport} => {rport}")
        returncode, _, _ = await self._tunnel_command(
            ["forward", "-L", f"127.0.0.1:{lport}:127.0.0.1:{rport}"])

    async def _is_tunnel_up(self):
        if not self._tunnel_ctl_path:
            return False
        returncode, _, _ = await self._tunnel_command(["check"])
        return returncode == 0

    async def _tunnel_command(self, cmd: "list[str]"):
        proc = await asyncio.create_subprocess_exec(
            *self._ssh_cmd,
            "-o",
            f"ControlPath={self._tunnel_ctl_path}",
            "-O",
            *cmd,
            self._ssh_host,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        stdout, stderr = await proc.communicate()
        return proc.returncode, stdout, stderr

    def _on_ansible_event(self, event):
        current_task = event.get("event_data", {}).get("task")
        if current_task:
            self.binding.update_progress(current_task)
Example #17
0
class FirstUseAuthenticator(Authenticator):
    """
    JupyterHub authenticator that lets users set password on first use.
    """
    dbm_path = Unicode('passwords.dbm',
                       config=True,
                       help="""
        Path to store the db file with username / pwd hash in
        """)

    create_users = Bool(True,
                        config=True,
                        help="""
        Create users if they do not exist already.

        When set to False, users must be explicitly created before
        they can log in. Users can be created via the admin panel or by adding
        them to the whitelist / admin list.
        """)

    min_password_length = Integer(7,
                                  config=True,
                                  help="""
        The minimum length of the password when user is created.
        When set to 0, users will be allowed to set 0 length passwords.
        """)

    check_passwords_on_startup = Bool(
        True,
        config=True,
        help="""
        Check for non-normalized-username passwords on startup.

        Prior to 1.0, multiple passwords could be set for the same username,
        without normalization.

        When True, duplicate usernames will be detected and removed,
        and all usernames will be normalized.

        If any duplicates are found, a backup of the original is created,
        which can be inspected manually.

        Typically, this will only need to run once.
        """,
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        if self.check_passwords_on_startup:
            self._check_passwords()

    def _check_passwords(self):
        """Validation checks on the password database at startup

        Mainly checks for the presence of passwords for non-normalized usernames

        If a username is present only in one non-normalized form,
        it will be renamed to the normalized form.

        If multiple forms of the same normalized username are present,
        ensure that at least the normalized form is also present.
        It will continue to produce warnings until manual intervention removes the non-normalized entries.

        Non-normalized entries will never be used during login.
        """

        # it's nontrivial to check for db existence, because there are so many extensions
        # and you don't give dbm a path, you give it a *base* name,
        # which may point to one or more paths.
        # There's no way to retrieve the actual path(s) for a db
        dbm_extensions = ("", ".db", ".pag", ".dir", ".dat", ".bak")
        dbm_files = list(
            filter(os.path.isfile,
                   (self.dbm_path + ext for ext in dbm_extensions)))
        if not dbm_files:
            # no database, nothing to do
            return

        backup_path = self.dbm_path + "-backup"
        backup_files = list(
            filter(os.path.isfile,
                   (backup_path + ext for ext in dbm_extensions)))

        collision_warning = (
            f"Duplicate password entries have been found, and stored in {backup_path!r}."
            f" Duplicate entries have been removed from {self.dbm_path!r}."
            f" If you are happy with the solution, you can delete the backup file(s): {' '.join(backup_files)}."
            " Or you can inspect the backup database with:\n"
            "    import dbm\n"
            f"    with dbm.open({backup_path!r}, 'r') as db:\n"
            "        for username in db.keys():\n"
            "            print(username, db[username])\n")

        if backup_files:
            self.log.warning(collision_warning)
            return

        # create a temporary backup of the passwords db
        # to be retained only if collisions are detected
        # or deleted if no collisions are detected
        backup_files = []
        for path in dbm_files:
            base, ext = os.path.splitext(path)
            if ext not in dbm_extensions:
                # catch weird names with '.' and no .db extension
                base = path
                ext = ""
            backup = f"{base}-backup{ext}"
            shutil.copyfile(path, backup)
            backup_files.append(backup)

        collision_found = False

        with dbm.open(self.dbm_path, "w") as db:
            # load the username:hashed_password dict
            passwords = {}
            for key in db.keys():
                passwords[key.decode("utf8")] = db[key]

            # normalization map
            # compute the full map before checking in case two non-normalized forms are used
            # keys are normalized usernames,
            # values are lists of all names present in the db
            # which normalize to the same user
            normalized_usernames = {}
            for username in passwords:
                normalized_username = self.normalize_username(username)
                normalized_usernames.setdefault(normalized_username,
                                                []).append(username)

            # check if any non-normalized usernames are in the db
            for normalized_username, usernames in normalized_usernames.items():
                # case 1. only one form, make sure it's stored in the normalized username
                if len(usernames) == 1:
                    username = usernames[0]
                    # case 1.a only normalized form, nothing to do
                    if username == normalized_username:
                        continue
                    # 1.b only one form, not normalized. Unambiguous to fix.
                    # move password from non-normalized to normalized.
                    self.log.warning(
                        f"Normalizing username in password db {username}->{normalized_username}"
                    )
                    db[normalized_username.encode(
                        "utf8")] = passwords[username]
                    del db[username]
                else:
                    # collision! Multiple passwords for the same Hub user with different normalization
                    # do not clear these automatically because the 'right' answer is ambiguous,
                    # but make sure the normalized_username is set,
                    # so that after upgrade, there is always a password set
                    # the non-normalized username passwords will never be used
                    # after jupyterhub-firstuseauthenticator 1.0
                    self.log.warning(
                        f"{len(usernames)} variations of the username {normalized_username} present in password database: {usernames}."
                        f" Only the password stored for the normalized {normalized_username} will be used."
                    )
                    collision_found = True
                    if normalized_username not in passwords:
                        # we choose usernames[0] as most likely to be the first entry
                        # this isn't guaranteed, but it's the best information we have
                        username = usernames[0]
                        self.log.warning(
                            f"Normalizing username in password db {username}->{normalized_username}"
                        )
                        db[normalized_username.encode(
                            "utf8")] = passwords[username]
                    for username in usernames:
                        if username != normalized_username:
                            self.log.warning(
                                f"Removing un-normalized username from password db {username}"
                            )
                            del db[username]

        if collision_found:
            self.log.warning(collision_warning)
        else:
            # remove backup files, if we didn't find anything to backup
            self.log.debug(
                f"No collisions found, removing backup files {backup_files}")
            for path in backup_files:
                try:
                    os.remove(path)
                except FileNotFoundError:
                    pass

    def _user_exists(self, username):
        """
        Return true if given user already exists.

        Note: Depends on internal details of JupyterHub that might change
        across versions. Tested with v0.9
        """
        return self.db.query(User).filter_by(name=username).first() is not None

    def _validate_password(self, password):
        return len(password) >= self.min_password_length

    def validate_username(self, name):
        invalid_chars = [',', ' ']
        if any((char in name) for char in invalid_chars):
            return False
        return super().validate_username(name)

    async def authenticate(self, handler, data):
        username = self.normalize_username(data["username"])
        password = data["password"]

        if not self.create_users:
            if not self._user_exists(username):
                return None

        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            stored_pw = db.get(username.encode("utf8"), None)

            if stored_pw is not None:
                # for existing passwords: ensure password hash match
                if bcrypt.hashpw(password.encode("utf8"),
                                 stored_pw) != stored_pw:
                    return None
            else:
                # for new users: ensure password validity and store password hash
                if not self._validate_password(password):
                    handler.custom_login_error = (
                        'Password too short! Please choose a password at least %d characters long.'
                        % self.min_password_length)
                    self.log.error(handler.custom_login_error)
                    return None
                db[username] = bcrypt.hashpw(password.encode("utf8"),
                                             bcrypt.gensalt())

        return username

    def delete_user(self, user):
        """
        When user is deleted, remove their entry from password db.

        This lets passwords be reset by deleting users.
        """
        try:
            with dbm.open(self.dbm_path, 'c', 0o600) as db:
                del db[user.name]
        except KeyError:
            pass

    def reset_password(self, username, new_password):
        """
        This allows changing the password of a logged-in user.
        """
        if not self._validate_password(new_password):
            login_err = (
                'Password too short! Please choose a password at least %d characters long.'
                % self.min_password_length)
            self.log.error(login_err)
            # Resetting the password will fail if the new password is too short.
            return login_err
        with dbm.open(self.dbm_path, "c", 0o600) as db:
            db[username] = bcrypt.hashpw(new_password.encode("utf8"),
                                         bcrypt.gensalt())
        login_msg = "Your password has been changed successfully!"
        self.log.info(login_msg)
        return login_msg

    def get_handlers(self, app):
        return [
            (r"/login", CustomLoginHandler),
            (r"/auth/change-password", ResetPasswordHandler),
        ]
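A hedged configuration sketch for jupyterhub_config.py; the import path assumes the jupyterhub-firstuseauthenticator package layout, and the path and values shown are illustrative:

c.JupyterHub.authenticator_class = "firstuseauthenticator.FirstUseAuthenticator"
c.FirstUseAuthenticator.dbm_path = "/srv/jupyterhub/passwords.dbm"  # illustrative location
c.FirstUseAuthenticator.min_password_length = 10
c.FirstUseAuthenticator.create_users = False  # only pre-created users may log in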
Example #18
0
class BaseJSONConfigManager(LoggingConfigurable):
    """General JSON config manager
    
    Deals with persisting/storing config in a json file, with optional
    default values in a {section_name}.d directory.
    """

    config_dir = Unicode('.')
    read_directory = Bool(True)

    def ensure_config_dir_exists(self):
        """Will try to create the config_dir directory."""
        try:
            os.makedirs(self.config_dir, 0o755)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

    def file_name(self, section_name):
        """Returns the json filename for the section_name: {config_dir}/{section_name}.json"""
        return os.path.join(self.config_dir, section_name + '.json')

    def directory(self, section_name):
        """Returns the directory name for the section name: {config_dir}/{section_name}.d"""
        return os.path.join(self.config_dir, section_name + '.d')

    def get(self, section_name):
        """Retrieve the config data for the specified section.

        Returns the data as a dictionary, or an empty dictionary if the file
        doesn't exist.
        """
        paths = [self.file_name(section_name)]
        if self.read_directory:
            pattern = os.path.join(self.directory(section_name), '*.json')
            # These json files should be processed first so that
            # {section_name}.json takes precedence.
            # The idea behind this is that installing a Python package may
            # put a json file somewhere in the .d directory, while the
            # .json file is probably a user configuration.
            paths = sorted(glob.glob(pattern)) + paths
        self.log.debug('Paths used for configuration of %s: \n\t%s',
                       section_name, '\n\t'.join(paths))
        data = {}
        for path in paths:
            if os.path.isfile(path):
                with io.open(path, encoding='utf-8') as f:
                    recursive_update(data, json.load(f))
        return data

    def set(self, section_name, data):
        """Store the given config data.
        """
        filename = self.file_name(section_name)
        self.ensure_config_dir_exists()

        if PY3:
            f = io.open(filename, 'w', encoding='utf-8')
        else:
            f = open(filename, 'wb')
        with f:
            json.dump(data, f, indent=2)

    def update(self, section_name, new_data):
        """Modify the config section by recursively updating it with new_data.

        Returns the modified config data as a dictionary.
        """
        data = self.get(section_name)
        recursive_update(data, new_data)
        self.set(section_name, data)
        return data
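A small usage sketch of the merge order implemented by get() above; the "conf" directory and the "notebook" section name are hypothetical. Package-provided defaults would live in conf/notebook.d/*.json, while conf/notebook.json holds user settings and wins on conflicting keys:

manager = BaseJSONConfigManager(config_dir="conf")
manager.set("notebook", {"theme": "dark"})                 # writes conf/notebook.json
merged = manager.get("notebook")                           # .d defaults merged first, then the user file
merged = manager.update("notebook", {"autosave": False})   # recursive update, then persist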
Example #19
0
class Sub3(Application):
    flag = Bool(False)
Example #20
0
class Application(SingletonConfigurable):
    """A singleton application with full configuration support."""

    # The name of the application, will usually match the name of the command
    # line application
    name = Unicode('application')

    # The description of the application that is printed at the beginning
    # of the help.
    description = Unicode('This is an application.')
    # default section descriptions
    option_description = Unicode(option_description)
    keyvalue_description = Unicode(keyvalue_description)
    subcommand_description = Unicode(subcommand_description)

    python_config_loader_class = PyFileConfigLoader
    json_config_loader_class = JSONFileConfigLoader

    # The usage and example string that goes at the end of the help string.
    examples = Unicode()

    # A sequence of Configurable subclasses whose config=True attributes will
    # be exposed at the command line.
    classes = []

    def _classes_inc_parents(self, classes=None):
        """Iterate through configurable classes, including configurable parents

        :param classes:
            The list of classes to iterate; if not set, uses :attr:`classes`.

        Children should always be after parents, and each class should only be
        yielded once.
        """
        if classes is None:
            classes = self.classes

        seen = set()
        for c in classes:
            # We want to sort parents before children, so we reverse the MRO
            for parent in reversed(c.mro()):
                if issubclass(parent, Configurable) and (parent not in seen):
                    seen.add(parent)
                    yield parent

    # The version string of this application.
    version = Unicode('0.0')

    # the argv used to initialize the application
    argv = List()

    # Whether failing to load config files should prevent startup
    raise_config_file_errors = Bool(
        TRAITLETS_APPLICATION_RAISE_CONFIG_FILE_ERROR)

    # The log level for the application
    log_level = Enum(
        (0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'),
        default_value=logging.WARN,
        help="Set the log level by value or name.").tag(config=True)

    @observe('log_level')
    @observe_compat
    def _log_level_changed(self, change):
        """Adjust the log level when log_level is set."""
        new = change.new
        if isinstance(new, str):
            new = getattr(logging, new)
            self.log_level = new
        self.log.setLevel(new)

    _log_formatter_cls = LevelFormatter

    log_datefmt = Unicode(
        "%Y-%m-%d %H:%M:%S",
        help="The date format used by logging formatters for %(asctime)s").tag(
            config=True)

    log_format = Unicode(
        "[%(name)s]%(highlevel)s %(message)s",
        help="The Logging format template",
    ).tag(config=True)

    @observe('log_datefmt', 'log_format')
    @observe_compat
    def _log_format_changed(self, change):
        """Change the log formatter when log_format is set."""
        _log_handler = self._get_log_handler()
        if not _log_handler:
            warnings.warn(
                "No Handler found on {self.log}, setting log_format will have no effect",
                RuntimeWarning,
            )
            return
        _log_formatter = self._log_formatter_cls(fmt=self.log_format,
                                                 datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)

    @default('log')
    def _log_default(self):
        """Start logging for this application.

        The default is to log to stderr using a StreamHandler, if no default
        handler already exists.  The log level starts at logging.WARN, but this
        can be adjusted by setting the ``log_level`` attribute.
        """
        log = logging.getLogger(self.__class__.__name__)
        log.setLevel(self.log_level)
        log.propagate = False
        _log = log  # copied from Logger.hasHandlers() (new in Python 3.2)
        while _log:
            if _log.handlers:
                return log
            if not _log.propagate:
                break
            else:
                _log = _log.parent
        if sys.executable and sys.executable.endswith('pythonw.exe'):
            # this should really go to a file, but file-logging is only
            # hooked up in parallel applications
            _log_handler = logging.StreamHandler(open(os.devnull, 'w'))
        else:
            _log_handler = logging.StreamHandler()
        _log_formatter = self._log_formatter_cls(fmt=self.log_format,
                                                 datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)
        log.addHandler(_log_handler)
        return log

    #: the alias map for configurables
    #: Keys might be strings or tuples for additional options; single-letter aliases are accessed like `-v`.
    #: Values might be "Class.trait" strings or two-tuples: (Class.trait, help-text).
    aliases = {'log-level': 'Application.log_level'}

    # flags for loading Configurables or store_const style flags
    # flags are loaded from this dict by '--key' flags
    # this must be a dict of two-tuples, the first element being the Config/dict
    # and the second being the help string for the flag
    flags = {
        'debug': ({
            'Application': {
                'log_level': logging.DEBUG,
            },
        }, "Set log-level to debug, for the most verbose logging."),
        'show-config': ({
            'Application': {
                'show_config': True,
            },
        }, "Show the application's configuration (human-readable format)"),
        'show-config-json': ({
            'Application': {
                'show_config_json': True,
            },
        }, "Show the application's configuration (json format)"),
    }

    # subcommands for launching other applications
    # if this is not empty, this will be a parent Application
    # this must be a dict of two-tuples,
    # the first element being the application class/import string
    # and the second being the help string for the subcommand
    subcommands = Dict()
    # parse_command_line will initialize a subapp, if requested
    subapp = Instance('traitlets.config.application.Application',
                      allow_none=True)

    # extra command-line arguments that don't set config values
    extra_args = List(Unicode())

    cli_config = Instance(
        Config, (), {},
        help="""The subset of our configuration that came from the command-line

        We re-load this configuration after loading config files,
        to ensure that it maintains highest priority.
        """)

    _loaded_config_files = List()

    show_config = Bool(
        help="Instead of starting the Application, dump configuration to stdout"
    ).tag(config=True)

    show_config_json = Bool(
        help=
        "Instead of starting the Application, dump configuration to stdout (as JSON)"
    ).tag(config=True)

    @observe('show_config_json')
    def _show_config_json_changed(self, change):
        self.show_config = change.new

    @observe('show_config')
    def _show_config_changed(self, change):
        if change.new:
            self._save_start = self.start
            self.start = self.start_show_config

    def __init__(self, **kwargs):
        SingletonConfigurable.__init__(self, **kwargs)
        # Ensure my class is in self.classes, so my attributes appear in command line
        # options and config files.
        cls = self.__class__
        if cls not in self.classes:
            if self.classes is cls.classes:
                # class attr, assign instead of insert
                self.classes = [cls] + self.classes
            else:
                self.classes.insert(0, self.__class__)

    @observe('config')
    @observe_compat
    def _config_changed(self, change):
        super(Application, self)._config_changed(change)
        self.log.debug('Config changed: %r', change.new)

    @catch_config_error
    def initialize(self, argv=None):
        """Do the basic steps to configure me.

        Override in subclasses.
        """
        self.parse_command_line(argv)

    def start(self):
        """Start the app mainloop.

        Override in subclasses.
        """
        if self.subapp is not None:
            return self.subapp.start()

    def start_show_config(self):
        """start function used when show_config is True"""
        config = self.config.copy()
        # exclude show_config flags from displayed config
        for cls in self.__class__.mro():
            if cls.__name__ in config:
                cls_config = config[cls.__name__]
                cls_config.pop('show_config', None)
                cls_config.pop('show_config_json', None)

        if self.show_config_json:
            json.dump(config,
                      sys.stdout,
                      indent=1,
                      sort_keys=True,
                      default=repr)
            # add trailing newline
            sys.stdout.write('\n')
            return

        if self._loaded_config_files:
            print("Loaded config files:")
            for f in self._loaded_config_files:
                print('  ' + f)
            print()

        for classname in sorted(config):
            class_config = config[classname]
            if not class_config:
                continue
            print(classname)
            pformat_kwargs = dict(indent=4, compact=True)

            for traitname in sorted(class_config):
                value = class_config[traitname]
                print('  .{} = {}'.format(
                    traitname,
                    pprint.pformat(value, **pformat_kwargs),
                ))

    def print_alias_help(self):
        """Print the alias parts of the help."""
        print('\n'.join(self.emit_alias_help()))

    def emit_alias_help(self):
        """Yield the lines for alias part of the help."""
        if not self.aliases:
            return

        classdict = {}
        for cls in self.classes:
            # include all parents (up to, but excluding Configurable) in available names
            for c in cls.mro()[:-3]:
                classdict[c.__name__] = c

        for alias, longname in self.aliases.items():
            try:
                if isinstance(longname, tuple):
                    longname, fhelp = longname
                else:
                    fhelp = None
                classname, traitname = longname.split('.', 1)
                cls = classdict[classname]

                trait = cls.class_traits(config=True)[traitname]
                fhelp = cls.class_get_trait_help(trait,
                                                 helptext=fhelp).splitlines()

                if not isinstance(alias, tuple):
                    alias = (alias, )
                alias = sorted(alias, key=len)
                alias = ', '.join(
                    ('--%s' if len(m) > 1 else '-%s') % m for m in alias)

                # reformat first line
                fhelp[0] = fhelp[0].replace('--' + longname, alias)
                for l in fhelp:
                    yield l
                yield indent("Equivalent to: [--%s]" % longname)
            except Exception as ex:
                self.log.error(
                    'Failed collecting help-message for alias %r, due to: %s',
                    alias, ex)
                raise

    def print_flag_help(self):
        """Print the flag part of the help."""
        print('\n'.join(self.emit_flag_help()))

    def emit_flag_help(self):
        """Yield the lines for the flag part of the help."""
        if not self.flags:
            return

        for flags, (cfg, fhelp) in self.flags.items():
            try:
                if not isinstance(flags, tuple):
                    flags = (flags, )
                flags = sorted(flags, key=len)
                flags = ', '.join(
                    ('--%s' if len(m) > 1 else '-%s') % m for m in flags)
                yield flags
                yield indent(dedent(fhelp.strip()))
                cfg_list = ' '.join('--%s.%s=%s' % (clname, prop, val)
                                    for clname, props_dict in cfg.items()
                                    for prop, val in props_dict.items())
                cfg_txt = "Equivalent to: [%s]" % cfg_list
                yield indent(dedent(cfg_txt))
            except Exception as ex:
                self.log.error(
                    'Failed collecting help-message for flag %r, due to: %s',
                    flags, ex)
                raise

    def print_options(self):
        """Print the options part of the help."""
        print('\n'.join(self.emit_options_help()))

    def emit_options_help(self):
        """Yield the lines for the options part of the help."""
        if not self.flags and not self.aliases:
            return
        header = 'Options'
        yield header
        yield '=' * len(header)
        for p in wrap_paragraphs(self.option_description):
            yield p
            yield ''

        for l in self.emit_flag_help():
            yield l
        for l in self.emit_alias_help():
            yield l
        yield ''

    def print_subcommands(self):
        """Print the subcommand part of the help."""
        print('\n'.join(self.emit_subcommands_help()))

    def emit_subcommands_help(self):
        """Yield the lines for the subcommand part of the help."""
        if not self.subcommands:
            return

        header = "Subcommands"
        yield header
        yield '=' * len(header)
        for p in wrap_paragraphs(
                self.subcommand_description.format(app=self.name)):
            yield p
            yield ''
        for subc, (cls, help) in self.subcommands.items():
            yield subc
            if help:
                yield indent(dedent(help.strip()))
        yield ''

    def emit_help_epilogue(self, classes):
        """Yield the very bottom lines of the help message.

        If classes=False (the default), print `--help-all` msg.
        """
        if not classes:
            yield "To see all available configurables, use `--help-all`."
            yield ''

    def print_help(self, classes=False):
        """Print the help for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        print('\n'.join(self.emit_help(classes=classes)))

    def emit_help(self, classes=False):
        """Yield the help-lines for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        for l in self.emit_description():
            yield l
        for l in self.emit_subcommands_help():
            yield l
        for l in self.emit_options_help():
            yield l

        if classes:
            help_classes = self._classes_with_config_traits()
            if help_classes:
                yield "Class options"
                yield "============="
                for p in wrap_paragraphs(self.keyvalue_description):
                    yield p
                    yield ''

            for cls in help_classes:
                yield cls.class_get_help()
                yield ''
        for l in self.emit_examples():
            yield l

        for l in self.emit_help_epilogue(classes):
            yield l

    def document_config_options(self):
        """Generate rST format documentation for the config options this application

        Returns a multiline string.
        """
        return '\n'.join(c.class_config_rst_doc()
                         for c in self._classes_inc_parents())

    def print_description(self):
        """Print the application description."""
        print('\n'.join(self.emit_description()))

    def emit_description(self):
        """Yield lines with the application description."""
        for p in wrap_paragraphs(self.description or self.__doc__):
            yield p
            yield ''

    def print_examples(self):
        """Print usage and examples (see `emit_examples()`). """
        print('\n'.join(self.emit_examples()))

    def emit_examples(self):
        """Yield lines with the usage and examples.

        This usage string goes at the end of the command line help string
        and should contain examples of the application's usage.
        """
        if self.examples:
            yield "Examples"
            yield "--------"
            yield ''
            yield indent(dedent(self.examples.strip()))
            yield ''

    def print_version(self):
        """Print the version string."""
        print(self.version)

    @catch_config_error
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, _ = self.subcommands.get(subc)

        if isinstance(subapp, str):
            subapp = import_item(subapp)

        ## Cannot issubclass() on a non-type (SO: http://stackoverflow.com/questions/8692430)
        if isinstance(subapp, type) and issubclass(subapp, Application):
            # Clear existing instances before...
            self.__class__.clear_instance()
            # instantiating subapp...
            self.subapp = subapp.instance(parent=self)
        elif callable(subapp):
            # or ask factory to create it...
            self.subapp = subapp(self)
        else:
            raise AssertionError("Invalid mappings for subcommand '%s'!" %
                                 subc)

        # ... and finally initialize subapp.
        self.subapp.initialize(argv)

    def flatten_flags(self):
        """Flatten flags and aliases for loaders, so cl-args override as expected.

        This prevents issues such as an alias pointing to InteractiveShell,
        but a config file setting the same trait in TerminalInteractiveShell
        getting inappropriate priority over the command-line arg.
        Also, loaders expect ``(key: longname)`` and not ``key: (longname, help)`` items.

        Only aliases with exactly one descendant in the class list
        will be promoted.

        """
        # build a tree of classes in our list that inherit from a particular class;
        # it will be a dict, keyed by parent classname, of classes in our list
        # that are descendants
        mro_tree = defaultdict(list)
        for cls in self.classes:
            clsname = cls.__name__
            for parent in cls.mro()[1:-3]:
                # exclude cls itself and Configurable,HasTraits,object
                mro_tree[parent.__name__].append(clsname)
        # flatten aliases, which have the form:
        # { 'alias' : 'Class.trait' }
        aliases = {}
        for alias, longname in self.aliases.items():
            if isinstance(longname, tuple):
                longname, _ = longname
            cls, trait = longname.split('.', 1)
            children = mro_tree[cls]
            if len(children) == 1:
                # exactly one descendent, promote alias
                cls = children[0]
            if not isinstance(alias, tuple):
                alias = (alias, )
            for al in alias:
                aliases[al] = '.'.join([cls, trait])

        # flatten flags, which are of the form:
        # { 'key' : ({'Cls' : {'trait' : value}}, 'help')}
        flags = {}
        for key, (flagdict, help) in self.flags.items():
            newflag = {}
            for cls, subdict in flagdict.items():
                children = mro_tree[cls]
                # exactly one descendent, promote flag section
                if len(children) == 1:
                    cls = children[0]

                if cls in newflag:
                    newflag[cls].update(subdict)
                else:
                    newflag[cls] = subdict

            if not isinstance(key, tuple):
                key = (key, )
            for k in key:
                flags[k] = (newflag, help)
        return flags, aliases

    def _create_loader(self, argv, aliases, flags, classes):
        return KVArgParseConfigLoader(argv,
                                      aliases,
                                      flags,
                                      classes=classes,
                                      log=self.log)

    @catch_config_error
    def parse_command_line(self, argv=None):
        """Parse the command line arguments."""
        assert not isinstance(argv, str)
        argv = sys.argv[1:] if argv is None else argv
        self.argv = [cast_unicode(arg) for arg in argv]

        if argv and argv[0] == 'help':
            # turn `ipython help notebook` into `ipython notebook -h`
            argv = argv[1:] + ['-h']

        if self.subcommands and len(argv) > 0:
            # we have subcommands, and one may have been specified
            subc, subargv = argv[0], argv[1:]
            if re.match(r'^\w(\-?\w)*$', subc) and subc in self.subcommands:
                # it's a subcommand, and *not* a flag or class parameter
                return self.initialize_subcommand(subc, subargv)

        # Arguments after a '--' argument are for the script IPython may be
        # about to run, not IPython itself. For arguments parsed here (help and
        # version), we want to only search the arguments up to the first
        # occurrence of '--', which we're calling interpreted_argv.
        try:
            interpreted_argv = argv[:argv.index('--')]
        except ValueError:
            interpreted_argv = argv

        if any(x in interpreted_argv for x in ('-h', '--help-all', '--help')):
            self.print_help('--help-all' in interpreted_argv)
            self.exit(0)

        if '--version' in interpreted_argv or '-V' in interpreted_argv:
            self.print_version()
            self.exit(0)

        # flatten flags&aliases, so cl-args get appropriate priority:
        flags, aliases = self.flatten_flags()
        classes = tuple(self._classes_with_config_traits())
        loader = self._create_loader(argv, aliases, flags, classes=classes)
        try:
            self.cli_config = deepcopy(loader.load_config())
        except SystemExit:
            # traitlets 5: no longer print help output on error
            # help output is huge, and comes after the error
            raise
        self.update_config(self.cli_config)
        # store unparsed args in extra_args
        self.extra_args = loader.extra_args

    @classmethod
    def _load_config_files(cls,
                           basefilename,
                           path=None,
                           log=None,
                           raise_config_file_errors=False):
        """Load config files (py,json) by filename and path.

        yield each config object in turn.
        """

        if not isinstance(path, list):
            path = [path]
        for path in path[::-1]:
            # path list is in descending priority order, so load files backwards:
            pyloader = cls.python_config_loader_class(basefilename + '.py',
                                                      path=path,
                                                      log=log)
            if log:
                log.debug("Looking for %s in %s", basefilename, path
                          or os.getcwd())
            jsonloader = cls.json_config_loader_class(basefilename + '.json',
                                                      path=path,
                                                      log=log)
            loaded = []
            filenames = []
            for loader in [pyloader, jsonloader]:
                config = None
                try:
                    config = loader.load_config()
                except ConfigFileNotFound:
                    pass
                except Exception:
                    # try to get the full filename, but it will be empty in the
                    # unlikely event that the error was raised before filefind finished
                    filename = loader.full_filename or basefilename
                    # problem while running the file
                    if raise_config_file_errors:
                        raise
                    if log:
                        log.error("Exception while loading config file %s",
                                  filename,
                                  exc_info=True)
                else:
                    if log:
                        log.debug("Loaded config file: %s",
                                  loader.full_filename)
                if config:
                    for filename, earlier_config in zip(filenames, loaded):
                        collisions = earlier_config.collisions(config)
                        if collisions and log:
                            log.warning(
                                "Collisions detected in {0} and {1} config files."
                                " {1} has higher priority: {2}".format(
                                    filename,
                                    loader.full_filename,
                                    json.dumps(collisions, indent=2),
                                ))
                    yield (config, loader.full_filename)
                    loaded.append(config)
                    filenames.append(loader.full_filename)

    @property
    def loaded_config_files(self):
        """Currently loaded configuration files"""
        return self._loaded_config_files[:]

    @catch_config_error
    def load_config_file(self, filename, path=None):
        """Load config files by filename and path."""
        filename, ext = os.path.splitext(filename)
        new_config = Config()
        for (config, filename) in self._load_config_files(
                filename,
                path=path,
                log=self.log,
                raise_config_file_errors=self.raise_config_file_errors,
        ):
            new_config.merge(config)
            if filename not in self._loaded_config_files:  # only add to list of loaded files if not previously loaded
                self._loaded_config_files.append(filename)
        # add self.cli_config to preserve CLI config priority
        new_config.merge(self.cli_config)
        self.update_config(new_config)

    def _classes_with_config_traits(self, classes=None):
        """
        Yields only classes with configurable traits, and their subclasses.

        :param classes:
            The list of classes to iterate; if not set, uses :attr:`classes`.

        Thus, the produced sample config file will contain all classes
        on which a trait value may be overridden:

        - either on the class owning the trait,
        - or on its subclasses, even if those subclasses do not define
          any traits themselves.
        """
        if classes is None:
            classes = self.classes

        cls_to_config = OrderedDict(
            (cls, bool(cls.class_own_traits(config=True)))
            for cls in self._classes_inc_parents(classes))

        def is_any_parent_included(cls):
            return any(b in cls_to_config and cls_to_config[b]
                       for b in cls.__bases__)

        # Mark "empty" classes for inclusion if any of their parents own
        # config traits, and loop until no more classes get marked.
        while True:
            to_incl_orig = cls_to_config.copy()
            cls_to_config = OrderedDict(
                (cls, inc_yes or is_any_parent_included(cls))
                for cls, inc_yes in cls_to_config.items())
            if cls_to_config == to_incl_orig:
                break
        for cl, inc_yes in cls_to_config.items():
            if inc_yes:
                yield cl

    def generate_config_file(self, classes=None):
        """generate default config file from Configurables"""
        lines = ["# Configuration file for %s." % self.name]
        lines.append('')
        classes = self.classes if classes is None else classes
        config_classes = list(self._classes_with_config_traits(classes))
        for cls in config_classes:
            lines.append(cls.class_config_section(config_classes))
        return '\n'.join(lines)

    def exit(self, exit_status=0):
        self.log.debug("Exiting application: %s" % self.name)
        sys.exit(exit_status)

    @classmethod
    def launch_instance(cls, argv=None, **kwargs):
        """Launch a global instance of this Application

        If a global instance already exists, this reinitializes and starts it
        """
        app = cls.instance(**kwargs)
        app.initialize(argv)
        app.start()
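The config-file helpers in this snippet can be exercised end to end: generate_config_file() produces a commented template for every configurable class, and load_config_file() merges matching .py/.json files back into the application's config. A minimal sketch, assuming only the traitlets API shown above; the ConfigApp and Worker classes and the demo_config filename are made up for illustration.

from traitlets import Int, Unicode
from traitlets.config import Application, Configurable


class Worker(Configurable):
    retries = Int(3, help="Number of retry attempts.").tag(config=True)


class ConfigApp(Application):
    name = Unicode("config-app")
    classes = [Worker]


if __name__ == "__main__":
    app = ConfigApp()
    # Write a commented sample config file covering all configurable classes.
    with open("demo_config.py", "w") as f:
        f.write(app.generate_config_file())
    # Load it back: any values set in demo_config.py are merged into app.config.
    app.load_config_file("demo_config", path=".")
    print(app.config)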
Example #21
0
class Application(SingletonConfigurable):
    """A singleton application with full configuration support."""

    # The name of the application, will usually match the name of the command
    # line application
    name = Unicode("application")

    # The description of the application that is printed at the beginning
    # of the help.
    description = Unicode("This is an application.")
    # default section descriptions
    option_description = Unicode(option_description)
    keyvalue_description = Unicode(keyvalue_description)
    subcommand_description = Unicode(subcommand_description)

    python_config_loader_class = PyFileConfigLoader
    json_config_loader_class = JSONFileConfigLoader

    # The usage and example string that goes at the end of the help string.
    examples = Unicode()

    # A sequence of Configurable subclasses whose config=True attributes will
    # be exposed at the command line.
    classes: t.List[t.Type[t.Any]] = []

    def _classes_inc_parents(self, classes=None):
        """Iterate through configurable classes, including configurable parents

        :param classes:
            The list of classes to iterate; if not set, uses :attr:`classes`.

        Children should always be after parents, and each class should only be
        yielded once.
        """
        if classes is None:
            classes = self.classes

        seen = set()
        for c in classes:
            # We want to sort parents before children, so we reverse the MRO
            for parent in reversed(c.mro()):
                if issubclass(parent, Configurable) and (parent not in seen):
                    seen.add(parent)
                    yield parent

    # The version string of this application.
    version = Unicode("0.0")

    # the argv used to initialize the application
    argv = List()

    # Whether failing to load config files should prevent startup
    raise_config_file_errors = Bool(TRAITLETS_APPLICATION_RAISE_CONFIG_FILE_ERROR)

    # The log level for the application
    log_level = Enum(
        (0, 10, 20, 30, 40, 50, "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL"),
        default_value=logging.WARN,
        help="Set the log level by value or name.",
    ).tag(config=True)

    _log_formatter_cls = LevelFormatter

    log_datefmt = Unicode(
        "%Y-%m-%d %H:%M:%S", help="The date format used by logging formatters for %(asctime)s"
    ).tag(config=True)

    log_format = Unicode(
        "[%(name)s]%(highlevel)s %(message)s",
        help="The Logging format template",
    ).tag(config=True)

    def get_default_logging_config(self):
        """Return the base logging configuration.

        The default is to log to stderr using a StreamHandler, if no default
        handler already exists.

        The log handler level starts at logging.WARN, but this can be adjusted
        by setting the ``log_level`` attribute.

        The ``logging_config`` trait is merged into this allowing for finer
        control of logging.

        """
        config: t.Dict[str, t.Any] = {
            "version": 1,
            "handlers": {
                "console": {
                    "class": "logging.StreamHandler",
                    "formatter": "console",
                    "level": logging.getLevelName(self.log_level),
                    "stream": "ext://sys.stderr",
                },
            },
            "formatters": {
                "console": {
                    "class": (
                        f"{self._log_formatter_cls.__module__}"
                        f".{self._log_formatter_cls.__name__}"
                    ),
                    "format": self.log_format,
                    "datefmt": self.log_datefmt,
                },
            },
            "loggers": {
                self.__class__.__name__: {
                    "level": "DEBUG",
                    "handlers": ["console"],
                }
            },
            "disable_existing_loggers": False,
        }

        if sys.executable and sys.executable.endswith("pythonw.exe"):
            # disable logging
            # (this should really go to a file, but file-logging is only
            # hooked up in parallel applications)
            del config["handlers"]
            del config["loggers"]

        return config

    @observe("log_datefmt", "log_format", "log_level", "logging_config")
    def _observe_logging_change(self, change):
        # convert log level strings to ints
        log_level = self.log_level
        if isinstance(log_level, str):
            self.log_level = getattr(logging, log_level)
        self._configure_logging()

    @observe("log", type="default")
    def _observe_logging_default(self, change):
        self._configure_logging()

    def _configure_logging(self):
        config = self.get_default_logging_config()
        nested_update(config, self.logging_config or {})
        dictConfig(config)
        # make a note that we have configured logging
        self._logging_configured = True

    @default("log")
    def _log_default(self):
        """Start logging for this application."""
        log = logging.getLogger(self.__class__.__name__)
        log.propagate = False
        _log = log  # copied from Logger.hasHandlers() (new in Python 3.2)
        while _log:
            if _log.handlers:
                return log
            if not _log.propagate:
                break
            else:
                _log = _log.parent  # type:ignore[assignment]
        return log

    logging_config = Dict(
        help="""
            Configure additional log handlers.

            The default stderr logs handler is configured by the
            log_level, log_datefmt and log_format settings.

            This configuration can be used to configure additional handlers
            (e.g. to output the log to a file) or for finer control over the
            default handlers.

            If provided this should be a logging configuration dictionary, for
            more information see:
            https://docs.python.org/3/library/logging.config.html#logging-config-dictschema

            This dictionary is merged with the base logging configuration which
            defines the following:

            * A logging formatter intended for interactive use called
              ``console``.
            * A logging handler that writes to stderr called
              ``console`` which uses the formatter ``console``.
            * A logger with the name of this application set to ``DEBUG``
              level.

            This example adds a new handler that writes to a file:

            .. code-block:: python

               c.Application.logging_config = {
                   'handlers': {
                       'file': {
                           'class': 'logging.FileHandler',
                           'level': 'DEBUG',
                           'filename': '<path/to/file>',
                       }
                   },
                   'loggers': {
                       '<application-name>': {
                           'level': 'DEBUG',
                           # NOTE: if you don't list the default "console"
                           # handler here then it will be disabled
                           'handlers': ['console', 'file'],
                       },
                   }
               }

        """,
    ).tag(config=True)

    #: the alias map for configurables
    #: Keys may be strings or tuples of strings to define additional options; a single-letter alias is accessed like `-v`.
    #: Values may be "Class.trait" strings or two-tuples: (Class.trait, help-text).
    aliases: t.Dict[str, str] = {"log-level": "Application.log_level"}

    # flags for loading Configurables or store_const style flags
    # flags are loaded from this dict by '--key' flags
    # this must be a dict of two-tuples, the first element being the Config/dict
    # and the second being the help string for the flag
    flags: t.Dict[str, t.Any] = {
        "debug": (
            {
                "Application": {
                    "log_level": logging.DEBUG,
                },
            },
            "Set log-level to debug, for the most verbose logging.",
        ),
        "show-config": (
            {
                "Application": {
                    "show_config": True,
                },
            },
            "Show the application's configuration (human-readable format)",
        ),
        "show-config-json": (
            {
                "Application": {
                    "show_config_json": True,
                },
            },
            "Show the application's configuration (json format)",
        ),
    }

    # subcommands for launching other applications
    # if this is not empty, this will be a parent Application
    # this must be a dict of two-tuples,
    # the first element being the application class/import string
    # and the second being the help string for the subcommand
    subcommands = Dict()
    # parse_command_line will initialize a subapp, if requested
    subapp = Instance("traitlets.config.application.Application", allow_none=True)

    # extra command-line arguments that don't set config values
    extra_args = List(Unicode())

    cli_config = Instance(
        Config,
        (),
        {},
        help="""The subset of our configuration that came from the command-line

        We re-load this configuration after loading config files,
        to ensure that it maintains highest priority.
        """,
    )

    _loaded_config_files = List()

    show_config = Bool(
        help="Instead of starting the Application, dump configuration to stdout"
    ).tag(config=True)

    show_config_json = Bool(
        help="Instead of starting the Application, dump configuration to stdout (as JSON)"
    ).tag(config=True)

    @observe("show_config_json")
    def _show_config_json_changed(self, change):
        self.show_config = change.new

    @observe("show_config")
    def _show_config_changed(self, change):
        if change.new:
            self._save_start = self.start
            self.start = self.start_show_config  # type:ignore[assignment]

    def __init__(self, **kwargs):
        SingletonConfigurable.__init__(self, **kwargs)
        # Ensure my class is in self.classes, so my attributes appear in command line
        # options and config files.
        cls = self.__class__
        if cls not in self.classes:
            if self.classes is cls.classes:
                # class attr, assign instead of insert
                self.classes = [cls] + self.classes
            else:
                self.classes.insert(0, self.__class__)

    @observe("config")
    @observe_compat
    def _config_changed(self, change):
        super()._config_changed(change)
        self.log.debug("Config changed: %r", change.new)

    @catch_config_error
    def initialize(self, argv=None):
        """Do the basic steps to configure me.

        Override in subclasses.
        """
        self.parse_command_line(argv)

    def start(self):
        """Start the app mainloop.

        Override in subclasses.
        """
        if self.subapp is not None:
            return self.subapp.start()

    def start_show_config(self):
        """start function used when show_config is True"""
        config = self.config.copy()
        # exclude show_config flags from displayed config
        for cls in self.__class__.mro():
            if cls.__name__ in config:
                cls_config = config[cls.__name__]
                cls_config.pop("show_config", None)
                cls_config.pop("show_config_json", None)

        if self.show_config_json:
            json.dump(config, sys.stdout, indent=1, sort_keys=True, default=repr)
            # add trailing newline
            sys.stdout.write("\n")
            return

        if self._loaded_config_files:
            print("Loaded config files:")
            for f in self._loaded_config_files:
                print("  " + f)
            print()

        for classname in sorted(config):
            class_config = config[classname]
            if not class_config:
                continue
            print(classname)
            pformat_kwargs: t.Dict[str, t.Any] = dict(indent=4, compact=True)

            for traitname in sorted(class_config):
                value = class_config[traitname]
                print(
                    "  .{} = {}".format(
                        traitname,
                        pprint.pformat(value, **pformat_kwargs),
                    )
                )

    def print_alias_help(self):
        """Print the alias parts of the help."""
        print("\n".join(self.emit_alias_help()))

    def emit_alias_help(self):
        """Yield the lines for alias part of the help."""
        if not self.aliases:
            return

        classdict = {}
        for cls in self.classes:
            # include all parents (up to, but excluding Configurable) in available names
            for c in cls.mro()[:-3]:
                classdict[c.__name__] = c

        for alias, longname in self.aliases.items():
            try:
                if isinstance(longname, tuple):
                    longname, fhelp = longname
                else:
                    fhelp = None
                classname, traitname = longname.split(".")[-2:]
                longname = classname + "." + traitname
                cls = classdict[classname]

                trait = cls.class_traits(config=True)[traitname]
                fhelp = cls.class_get_trait_help(trait, helptext=fhelp).splitlines()

                if not isinstance(alias, tuple):
                    alias = (alias,)  # type:ignore[assignment]
                alias = sorted(alias, key=len)  # type:ignore[assignment]
                alias = ", ".join(("--%s" if len(m) > 1 else "-%s") % m for m in alias)

                # reformat first line
                fhelp[0] = fhelp[0].replace("--" + longname, alias)
                yield from fhelp
                yield indent("Equivalent to: [--%s]" % longname)
            except Exception as ex:
                self.log.error("Failed collecting help-message for alias %r, due to: %s", alias, ex)
                raise

    def print_flag_help(self):
        """Print the flag part of the help."""
        print("\n".join(self.emit_flag_help()))

    def emit_flag_help(self):
        """Yield the lines for the flag part of the help."""
        if not self.flags:
            return

        for flags, (cfg, fhelp) in self.flags.items():
            try:
                if not isinstance(flags, tuple):
                    flags = (flags,)  # type:ignore[assignment]
                flags = sorted(flags, key=len)  # type:ignore[assignment]
                flags = ", ".join(("--%s" if len(m) > 1 else "-%s") % m for m in flags)
                yield flags
                yield indent(dedent(fhelp.strip()))
                cfg_list = " ".join(
                    f"--{clname}.{prop}={val}"
                    for clname, props_dict in cfg.items()
                    for prop, val in props_dict.items()
                )
                cfg_txt = "Equivalent to: [%s]" % cfg_list
                yield indent(dedent(cfg_txt))
            except Exception as ex:
                self.log.error("Failed collecting help-message for flag %r, due to: %s", flags, ex)
                raise

    def print_options(self):
        """Print the options part of the help."""
        print("\n".join(self.emit_options_help()))

    def emit_options_help(self):
        """Yield the lines for the options part of the help."""
        if not self.flags and not self.aliases:
            return
        header = "Options"
        yield header
        yield "=" * len(header)
        for p in wrap_paragraphs(self.option_description):
            yield p
            yield ""

        yield from self.emit_flag_help()
        yield from self.emit_alias_help()
        yield ""

    def print_subcommands(self):
        """Print the subcommand part of the help."""
        print("\n".join(self.emit_subcommands_help()))

    def emit_subcommands_help(self):
        """Yield the lines for the subcommand part of the help."""
        if not self.subcommands:
            return

        header = "Subcommands"
        yield header
        yield "=" * len(header)
        for p in wrap_paragraphs(self.subcommand_description.format(app=self.name)):
            yield p
            yield ""
        for subc, (_, help) in self.subcommands.items():
            yield subc
            if help:
                yield indent(dedent(help.strip()))
        yield ""

    def emit_help_epilogue(self, classes):
        """Yield the very bottom lines of the help message.

        If classes=False (the default), print `--help-all` msg.
        """
        if not classes:
            yield "To see all available configurables, use `--help-all`."
            yield ""

    def print_help(self, classes=False):
        """Print the help for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        print("\n".join(self.emit_help(classes=classes)))

    def emit_help(self, classes=False):
        """Yield the help-lines for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        yield from self.emit_description()
        yield from self.emit_subcommands_help()
        yield from self.emit_options_help()

        if classes:
            help_classes = self._classes_with_config_traits()
            if help_classes:
                yield "Class options"
                yield "============="
                for p in wrap_paragraphs(self.keyvalue_description):
                    yield p
                    yield ""

            for cls in help_classes:
                yield cls.class_get_help()
                yield ""
        yield from self.emit_examples()

        yield from self.emit_help_epilogue(classes)

    def document_config_options(self):
        """Generate rST format documentation for the config options this application

        Returns a multiline string.
        """
        return "\n".join(c.class_config_rst_doc() for c in self._classes_inc_parents())

    def print_description(self):
        """Print the application description."""
        print("\n".join(self.emit_description()))

    def emit_description(self):
        """Yield lines with the application description."""
        for p in wrap_paragraphs(self.description or self.__doc__ or ""):
            yield p
            yield ""

    def print_examples(self):
        """Print usage and examples (see `emit_examples()`)."""
        print("\n".join(self.emit_examples()))

    def emit_examples(self):
        """Yield lines with the usage and examples.

        This usage string goes at the end of the command line help string
        and should contain examples of the application's usage.
        """
        if self.examples:
            yield "Examples"
            yield "--------"
            yield ""
            yield indent(dedent(self.examples.strip()))
            yield ""

    def print_version(self):
        """Print the version string."""
        print(self.version)

    @catch_config_error
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, _ = self.subcommands.get(subc)

        if isinstance(subapp, str):
            subapp = import_item(subapp)

        # Cannot issubclass() on a non-type (SO: http://stackoverflow.com/questions/8692430)
        if isinstance(subapp, type) and issubclass(subapp, Application):
            # Clear existing instances before...
            self.__class__.clear_instance()
            # instantiating subapp...
            self.subapp = subapp.instance(parent=self)
        elif callable(subapp):
            # or ask factory to create it...
            self.subapp = subapp(self)  # type:ignore[call-arg]
        else:
            raise AssertionError("Invalid mappings for subcommand '%s'!" % subc)

        # ... and finally initialize subapp.
        self.subapp.initialize(argv)

    def flatten_flags(self):
        """Flatten flags and aliases for loaders, so cl-args override as expected.

        This prevents issues such as an alias pointing to InteractiveShell,
        but a config file setting the same trait in TerminalInteractiveShell
        getting inappropriate priority over the command-line arg.
        Also, loaders expect ``key: longname`` and not ``key: (longname, help)`` items.

        Only aliases with exactly one descendant in the class list
        will be promoted.

        """
        # build a tree of classes in our list that inherit from a particular class:
        # it will be a dict keyed by parent classname, mapping to the classes in
        # our list that are its descendants
        mro_tree = defaultdict(list)
        for cls in self.classes:
            clsname = cls.__name__
            for parent in cls.mro()[1:-3]:
                # exclude cls itself and Configurable,HasTraits,object
                mro_tree[parent.__name__].append(clsname)
        # flatten aliases, which have the form:
        # { 'alias' : 'Class.trait' }
        aliases: t.Dict[str, str] = {}
        for alias, longname in self.aliases.items():
            if isinstance(longname, tuple):
                longname, _ = longname
            cls, trait = longname.split(".", 1)  # type:ignore[assignment]
            children = mro_tree[cls]  # type:ignore[index]
            if len(children) == 1:
                # exactly one descendant, promote alias
                cls = children[0]  # type:ignore[assignment]
            if not isinstance(alias, tuple):
                alias = (alias,)  # type:ignore[assignment]
            for al in alias:
                aliases[al] = ".".join([cls, trait])  # type:ignore[list-item]

        # flatten flags, which are of the form:
        # { 'key' : ({'Cls' : {'trait' : value}}, 'help')}
        flags = {}
        for key, (flagdict, help) in self.flags.items():
            newflag: t.Dict[t.Any, t.Any] = {}
            for cls, subdict in flagdict.items():
                children = mro_tree[cls]  # type:ignore[index]
                # exactly one descendant, promote flag section
                if len(children) == 1:
                    cls = children[0]  # type:ignore[assignment]

                if cls in newflag:
                    newflag[cls].update(subdict)
                else:
                    newflag[cls] = subdict

            if not isinstance(key, tuple):
                key = (key,)  # type:ignore[assignment]
            for k in key:
                flags[k] = (newflag, help)
        return flags, aliases

    def _create_loader(self, argv, aliases, flags, classes):
        return KVArgParseConfigLoader(argv, aliases, flags, classes=classes, log=self.log)

    @catch_config_error
    def parse_command_line(self, argv=None):
        """Parse the command line arguments."""
        assert not isinstance(argv, str)
        argv = sys.argv[1:] if argv is None else argv
        self.argv = [cast_unicode(arg) for arg in argv]

        if argv and argv[0] == "help":
            # turn `ipython help notebook` into `ipython notebook -h`
            argv = argv[1:] + ["-h"]

        if self.subcommands and len(argv) > 0:
            # we have subcommands, and one may have been specified
            subc, subargv = argv[0], argv[1:]
            if re.match(r"^\w(\-?\w)*$", subc) and subc in self.subcommands:
                # it's a subcommand, and *not* a flag or class parameter
                return self.initialize_subcommand(subc, subargv)

        # Arguments after a '--' argument are for the script IPython may be
        # about to run, not IPython itself. For arguments parsed here (help and
        # version), we want to only search the arguments up to the first
        # occurrence of '--', which we're calling interpreted_argv.
        try:
            interpreted_argv = argv[: argv.index("--")]
        except ValueError:
            interpreted_argv = argv

        if any(x in interpreted_argv for x in ("-h", "--help-all", "--help")):
            self.print_help("--help-all" in interpreted_argv)
            self.exit(0)

        if "--version" in interpreted_argv or "-V" in interpreted_argv:
            self.print_version()
            self.exit(0)

        # flatten flags&aliases, so cl-args get appropriate priority:
        flags, aliases = self.flatten_flags()
        classes = tuple(self._classes_with_config_traits())
        loader = self._create_loader(argv, aliases, flags, classes=classes)
        try:
            self.cli_config = deepcopy(loader.load_config())
        except SystemExit:
            # traitlets 5: no longer print help output on error
            # help output is huge, and comes after the error
            raise
        self.update_config(self.cli_config)
        # store unparsed args in extra_args
        self.extra_args = loader.extra_args

    @classmethod
    def _load_config_files(cls, basefilename, path=None, log=None, raise_config_file_errors=False):
        """Load config files (py,json) by filename and path.

        yield each config object in turn.
        """

        if not isinstance(path, list):
            path = [path]
        for path in path[::-1]:
            # path list is in descending priority order, so load files backwards:
            pyloader = cls.python_config_loader_class(basefilename + ".py", path=path, log=log)
            if log:
                log.debug("Looking for %s in %s", basefilename, path or os.getcwd())
            jsonloader = cls.json_config_loader_class(basefilename + ".json", path=path, log=log)
            loaded: t.List[t.Any] = []
            filenames: t.List[str] = []
            for loader in [pyloader, jsonloader]:
                config = None
                try:
                    config = loader.load_config()
                except ConfigFileNotFound:
                    pass
                except Exception:
                    # try to get the full filename, but it will be empty in the
                    # unlikely event that the error was raised before filefind finished
                    filename = loader.full_filename or basefilename
                    # problem while running the file
                    if raise_config_file_errors:
                        raise
                    if log:
                        log.error("Exception while loading config file %s", filename, exc_info=True)
                else:
                    if log:
                        log.debug("Loaded config file: %s", loader.full_filename)
                if config:
                    for filename, earlier_config in zip(filenames, loaded):
                        collisions = earlier_config.collisions(config)
                        if collisions and log:
                            log.warning(
                                "Collisions detected in {0} and {1} config files."
                                " {1} has higher priority: {2}".format(
                                    filename,
                                    loader.full_filename,
                                    json.dumps(collisions, indent=2),
                                )
                            )
                    yield (config, loader.full_filename)
                    loaded.append(config)
                    filenames.append(loader.full_filename)

    @property
    def loaded_config_files(self):
        """Currently loaded configuration files"""
        return self._loaded_config_files[:]

    @catch_config_error
    def load_config_file(self, filename, path=None):
        """Load config files by filename and path."""
        filename, ext = os.path.splitext(filename)
        new_config = Config()
        for (config, filename) in self._load_config_files(
            filename,
            path=path,
            log=self.log,
            raise_config_file_errors=self.raise_config_file_errors,
        ):
            new_config.merge(config)
            if (
                filename not in self._loaded_config_files
            ):  # only add to list of loaded files if not previously loaded
                self._loaded_config_files.append(filename)
        # add self.cli_config to preserve CLI config priority
        new_config.merge(self.cli_config)
        self.update_config(new_config)

    def _classes_with_config_traits(self, classes=None):
        """
        Yields only classes with configurable traits, and their subclasses.

        :param classes:
            The list of classes to iterate; if not set, uses :attr:`classes`.

        Thus, the produced sample config file will contain all classes
        on which a trait value may be overridden:

        - either on the class owning the trait,
        - or on its subclasses, even if those subclasses do not define
          any traits themselves.
        """
        if classes is None:
            classes = self.classes

        cls_to_config = OrderedDict(
            (cls, bool(cls.class_own_traits(config=True)))
            for cls in self._classes_inc_parents(classes)
        )

        def is_any_parent_included(cls):
            return any(b in cls_to_config and cls_to_config[b] for b in cls.__bases__)

        # Mark "empty" classes for inclusion if their parents own-traits,
        #  and loop until no more classes gets marked.
        #
        while True:
            to_incl_orig = cls_to_config.copy()
            cls_to_config = OrderedDict(
                (cls, inc_yes or is_any_parent_included(cls))
                for cls, inc_yes in cls_to_config.items()
            )
            if cls_to_config == to_incl_orig:
                break
        for cl, inc_yes in cls_to_config.items():
            if inc_yes:
                yield cl

    def generate_config_file(self, classes=None):
        """generate default config file from Configurables"""
        lines = ["# Configuration file for %s." % self.name]
        lines.append("")
        classes = self.classes if classes is None else classes
        config_classes = list(self._classes_with_config_traits(classes))
        for cls in config_classes:
            lines.append(cls.class_config_section(config_classes))
        return "\n".join(lines)

    def close_handlers(self):
        if getattr(self, "_logging_configured", False):
            # don't attempt to close handlers unless they have been opened
            # (note accessing self.log.handlers will create handlers if they
            # have not yet been initialised)
            for handler in self.log.handlers:
                with suppress(Exception):
                    handler.close()
            self._logging_configured = False

    def exit(self, exit_status=0):
        self.log.debug("Exiting application: %s" % self.name)
        self.close_handlers()
        sys.exit(exit_status)

    def __del__(self):
        self.close_handlers()

    @classmethod
    def launch_instance(cls, argv=None, **kwargs):
        """Launch a global instance of this Application

        If a global instance already exists, this reinitializes and starts it
        """
        app = cls.instance(**kwargs)
        app.initialize(argv)
        app.start()
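Since this full version of the class also wires up subcommands, parse_command_line can hand the remaining argv to a child application. A minimal sketch of that dispatch, assuming only the Application API shown above; MainApp, ServeApp, and the port trait are illustrative names, not part of traitlets itself.

from traitlets import Unicode
from traitlets.config import Application


class ServeApp(Application):
    name = Unicode("main serve")
    description = Unicode("Serve something.")
    port = Unicode("8080", help="Port to listen on.").tag(config=True)
    aliases = {"port": "ServeApp.port"}

    def start(self):
        print("serving on port %s" % self.port)


class MainApp(Application):
    name = Unicode("main")
    description = Unicode("Parent application that dispatches to subcommands.")
    # Values are (Application subclass or import string, help text) pairs.
    subcommands = {"serve": (ServeApp, "Start the server.")}

    def start(self):
        if self.subapp is None:
            print("No subcommand given; try: main serve --port=9000")
            return
        return self.subapp.start()


if __name__ == "__main__":
    # e.g. `python main.py serve --port=9000`
    MainApp.launch_instance()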
Example #22
0
class Bar(Configurable):

    b = Integer(0, help="The integer b.").tag(config=True)
    enabled = Bool(True, help="Enable bar.").tag(config=True)
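Configurables such as Bar do not need an Application at all: a Config object passed to the constructor overrides the trait defaults directly. A minimal, self-contained sketch using only the traitlets API; Bar is re-declared here so the snippet runs on its own.

from traitlets import Bool, Integer
from traitlets.config import Config, Configurable


class Bar(Configurable):
    b = Integer(0, help="The integer b.").tag(config=True)
    enabled = Bool(True, help="Enable bar.").tag(config=True)


# Values set on c.Bar override the trait defaults when Bar is instantiated.
c = Config()
c.Bar.b = 10
c.Bar.enabled = False

bar = Bar(config=c)
assert bar.b == 10 and bar.enabled is False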
Example #23
0
class FirstUseAuthenticator(Authenticator):
    """
    JupyterHub authenticator that lets users set password on first use.
    """
    dbm_path = Unicode(
        'passwords.dbm',
        config=True,
        help="""
        Path to store the db file with username / pwd hash in
        """
    )
    
    registration_path = Unicode(
        'registration.dbm',
        config=True,
        help="""
        Path to store the db file with username / registration code hash in
        """
    )
    
    email_path = Unicode(
        'email.dbm',
        config=True,
        help="""
        Path to store the db file with username / email hash in
        """
    )
    
    smtp_server = Unicode(
        'mailrouter.man.ac.uk',
        config=True,
        help="""
        Email server
        """
    )
    
    from_address = Unicode(
        '*****@*****.**',
        config=True,
        help="""
        Email from address
        """
    )
    
    register_confirm_address = Unicode(
        'https://spinn-20.cs.man.ac.uk/hub/confirm',
        config=True,
        help="""
        URL to confirm email address
        """
    )

    create_users = Bool(
        False,
        config=True,
        help="""
        Create users if they do not exist already.

        When set to False, users have to be explicitly created before
        they can log in. Users can be created via the admin panel or by adding
        them to the whitelist / admin list.
        """
    )

    def _user_exists(self, username):
        """
        Return true if given user already exists.

        Note: Depends on internal details of JupyterHub that might change
        across versions. Tested with v0.9
        """
        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            return username in db

    def validate_username(self, name):
        invalid_chars = [',', ' ']
        if any((char in name) for char in invalid_chars):
            return False
        return super().validate_username(name)

    @gen.coroutine
    def authenticate(self, handler, data):
        username = data['username']

        if not self.create_users:
            if not self._user_exists(username):
                return None
                
        with dbm.open(self.registration_path, 'c', 0o600) as db:
            if username in db:
                self.log.info("User {} has not verified their email address".format(username))

        password = data['password']
        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            stored_pw = db.get(username.encode(), None)
            if stored_pw is not None:
                if bcrypt.hashpw(password.encode(), stored_pw) != stored_pw:
                    return None
            else:
                db[username] = bcrypt.hashpw(password.encode(),
                                             bcrypt.gensalt())
        return username

    def delete_user(self, user):
        """
        When user is deleted, remove their entry from password db.

        This lets passwords be reset by deleting users.
        """
        try:
            with dbm.open(self.dbm_path, 'c', 0o600) as db:
                del db[user.name]
        except KeyError:
            pass

    def reset_password(self, username, new_password):
        """
        Allow a logged-in user to change their password.
        """
        with dbm.open(self.dbm_path, 'c', 0o600) as db:
            db[username] = bcrypt.hashpw(new_password.encode(),
                                         bcrypt.gensalt())
        return username

    def get_handlers(self, app):
        h = [(r'/auth/change-password', ResetPasswordHandler),
             (r'/auth/register', RegisterHandler),
             (r'/confirm', RegisterConfirmHandler)]
        return super().get_handlers(app) + h
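To wire an authenticator like this into JupyterHub, the usual pattern is to point JupyterHub.authenticator_class at it in jupyterhub_config.py and then set its config traits. A hedged sketch: the import path firstuse_email_authenticator is a guess for this customized variant and the values are placeholders; only the trait names (create_users, dbm_path, smtp_server, from_address, register_confirm_address) come from the class above.

# jupyterhub_config.py (sketch)
c = get_config()  # noqa  -- provided by JupyterHub when it loads this file

# Import path is hypothetical for this customized authenticator.
c.JupyterHub.authenticator_class = "firstuse_email_authenticator.FirstUseAuthenticator"

c.FirstUseAuthenticator.create_users = True
c.FirstUseAuthenticator.dbm_path = "/srv/jupyterhub/passwords.dbm"
c.FirstUseAuthenticator.smtp_server = "smtp.example.org"
c.FirstUseAuthenticator.from_address = "hub@example.org"
c.FirstUseAuthenticator.register_confirm_address = "https://hub.example.org/hub/confirm"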