class SimulationBase(CrowdDynamicsObject):
    # TODO: timezone information: tzutc? json serialization.
    timestamp = Instance(
        datetime,
        help='Timestamp of current utc time. Defaults to time when object was '
             'first created.')
    metadata = Instance(klass=OrderedDict, args=(),
                        help='Simulation metadata as dictionary.')
    data = Instance(
        klass=OrderedDict, args=(),
        help="Generated simulation data that is shared between logic nodes. "
             "This should be data that can be updated and should be saved on "
             "every iteration.")
    exit_condition = Instance(Callable, allow_none=True, help='')

    def __init__(self, *args, **kwargs):
        """Initialize the simulation and record name/timestamp metadata."""
        super().__init__(*args, **kwargs)
        self.metadata['name'] = self.name
        self.metadata['timestamp'] = str(self.timestamp)

    @default('timestamp')
    def _default_timestamp(self):
        # UTC so serialized timestamps are unambiguous across machines.
        return datetime.now(tz=tzutc())

    @property
    def name_with_timestamp(self):
        """Name with timestamp."""
        return '_'.join((self.name, str(self.timestamp).replace(' ', '_')))

    def update(self):
        """Advance the simulation by one iteration; subclasses must implement."""
        raise NotImplementedError

    def run(self):
        """Updates simulation until exit condition is met (returns True).

        Fix: the previous implementation ignored the ``exit_condition``
        trait entirely and hard-coded a target-count check (leaving a block
        of commented-out debug code behind).  Now a caller-supplied
        ``exit_condition`` is honored when present; otherwise the original
        hard-coded check is kept as the legacy fallback so existing
        behavior is unchanged.
        """
        if self.exit_condition is not None:
            # Generic, caller-supplied exit test.
            while not self.exit_condition(self):
                self.update()
            return
        # Legacy fallback: iterate until every agent has reached one of the
        # four targets.  Requires 'target_0'..'target_3' and 'n_agents'
        # keys in ``data`` (raises KeyError otherwise, same as before).
        while (self.data['target_0'] + self.data['target_1'] +
               self.data['target_2'] + self.data['target_3'] <
               self.data['n_agents']):
            self.update()
class GuiCommunication(LogicNode):
    """Communication between the GUI and simulation."""

    # multiprocessing.Queue() returns an instance whose concrete class is only
    # reachable via type(); traitlets' Instance needs the class object itself.
    queue = Instance(klass=type(Queue()), allow_none=True)

    def update(self, *args, **kwargs):
        """Snapshot the agent array and enqueue it together with shared data."""
        snapshot = np.copy(self.simulation.agents.array)
        message = Message(agents=snapshot, data=self.simulation.data)
        self.queue.put(message)
class AgentGroup(HasTraits):
    """Group of agents

    Examples:
        >>> group = AgentGroup(
        >>>     size=10,
        >>>     agent_type=Circular,
        >>>     attributes=...,
        >>> )
    """
    agent_type = Type(
        AgentType,
        allow_none=True,
        help='AgentType for generating agent from attributes.')
    size = Int(
        default_value=0,
        # Fix: help text said "Optional is attributes" (typo).
        help='Size of the agent group. Optional if attributes are instance of '
             'collection')
    attributes = Union(
        (Instance(Collection), Instance(Generator), Instance(Callable)),
        allow_none=True,
        help='Attributes of the chosen agent type.')
    members = List(Instance(AgentType), help='')

    @observe('size', 'agent_type', 'attributes')
    def _observe_members(self, change):
        """(Re)build ``members`` whenever size, agent_type or attributes change."""
        # NOTE(review): size > 0 is required even for Collection attributes,
        # although the ``size`` help text says it is optional in that case —
        # confirm whether the guard should drop the size check for collections.
        if self.size > 0 and self.attributes is not None and self.agent_type is not None:
            # Order matters: a Collection (e.g. list of attribute dicts) is
            # consumed directly; a Generator is advanced ``size`` times; a
            # Callable is invoked ``size`` times.
            if isinstance(self.attributes, Collection):
                self.members = [self.agent_type(**a) for a in self.attributes]
            elif isinstance(self.attributes, Generator):
                self.members = [
                    self.agent_type(**next(self.attributes))
                    for _ in range(self.size)
                ]
            elif isinstance(self.attributes, Callable):
                self.members = [
                    self.agent_type(**self.attributes())
                    for _ in range(self.size)
                ]
            else:
                # Fix: previously raised a bare TraitError with no message.
                raise TraitError(
                    'attributes must be a Collection, Generator or Callable, '
                    'got: {!r}'.format(self.attributes))
class LoggingConfigurable(Configurable):
    """A parent class for Configurables that log.

    Subclasses have a log trait, and the default behavior is to get the
    logger from the currently running Application.
    """

    log = Instance('logging.Logger')

    def _log_default(self):
        # Import lazily so traitlets.log is only touched when the default
        # actually fires (i.e. nobody assigned a logger explicitly).
        from traitlets import log as traitlets_log
        return traitlets_log.get_logger()
class SimulationBase(CrowdDynamicsObject):
    # TODO: timezone information: tzutc? json serialization.
    timestamp = Instance(
        datetime,
        help='Timestamp of current utc time. Defaults to time when object was '
             'first created.')
    metadata = Instance(klass=OrderedDict, args=(),
                        help='Simulation metadata as dictionary.')
    data = Instance(
        klass=OrderedDict, args=(),
        help="Generated simulation data that is shared between logic nodes. "
             "This should be data that can be updated and should be saved on "
             "every iteration.")
    exit_condition = Instance(Callable, allow_none=True, help='')

    def __init__(self, *args, **kwargs):
        """Initialize and record name/creation-timestamp into metadata."""
        super().__init__(*args, **kwargs)
        self.metadata['name'] = self.name
        self.metadata['timestamp'] = str(self.timestamp)

    @default('timestamp')
    def _default_timestamp(self):
        # UTC keeps serialized timestamps unambiguous across machines.
        return datetime.now(tz=tzutc())

    @property
    def name_with_timestamp(self):
        """Name with timestamp."""
        stamp = str(self.timestamp).replace(' ', '_')
        return '_'.join((self.name, stamp))

    def update(self):
        """Advance the simulation by one iteration; subclasses must implement."""
        raise NotImplementedError

    def run(self):
        """Updates simulation until exit condition is met (returns True)."""
        while True:
            condition = self.exit_condition
            # No exit condition configured means "run forever".
            if condition is not None and condition(self):
                break
            self.update()
class HydraKernelManager(AsyncKernelManager):
    # Factory the base AsyncKernelManager uses when building clients;
    # HydraKernelClient presumably knows how to reach the bound subkernel.
    client_factory = HydraKernelClient

    @default("kernel_spec_manager")
    def _default_kernel_spec_manager(self):
        # Hydra-aware spec manager so subkernel specs resolve through this
        # manager (parent=self wires up traitlets config inheritance).
        return HydraKernelSpecManager(parent=self)

    # Binding that ties this manager to a specific subkernel.
    binding = Instance(Binding)

    # TODO: refresh kernel spec manager (??) if the binding connection changes.
    # Not sure if that is really necessary.

    @property
    def kernel_name(self):
        """Override kernel name to refer to subkernel"""
        # NOTE(review): this shadows the base class's ``kernel_name`` trait
        # with a read-only property, so assigning ``kernel_name`` on this
        # manager will fail — presumably intentional so the name always
        # tracks ``binding.kernel``; confirm no caller sets kernel_name.
        return self.binding.kernel
class Configurable(HasTraits):
    """Base class for objects configurable via a :class:`Config` object.

    Fixes in this revision (both are inconsistencies with ``Application``
    elsewhere in this file, which would raise ``TypeError``):

    - ``class_config_section`` now accepts an optional ``classes`` argument,
      as passed by ``Application.generate_config_file``.
    - ``class_get_trait_help`` now accepts an optional ``helptext`` argument,
      as passed by ``Application.emit_alias_help``.
    """

    config = Instance(Config, (), {})
    parent = Instance('traitlets.config.configurable.Configurable',
                      allow_none=True)

    def __init__(self, **kwargs):
        """Create a configurable given a config config.

        Parameters
        ----------
        config : Config
            If this is empty, default values are used. If config is a
            :class:`Config` instance, it will be used to configure the
            instance.
        parent : Configurable instance, optional
            The parent Configurable instance of this object.

        Notes
        -----
        Subclasses of Configurable must call the :meth:`__init__` method of
        :class:`Configurable` *before* doing anything else and using
        :func:`super`::

            class MyConfigurable(Configurable):
                def __init__(self, config=None):
                    super(MyConfigurable, self).__init__(config=config)
                    # Then any other code you need to finish initialization.

        This ensures that instances will be configured properly.
        """
        parent = kwargs.pop('parent', None)
        if parent is not None:
            # config is implied from parent
            if kwargs.get('config', None) is None:
                kwargs['config'] = parent.config
            self.parent = parent

        config = kwargs.pop('config', None)

        # load kwarg traits, other than config
        super(Configurable, self).__init__(**kwargs)

        # load config
        if config is not None:
            # We used to deepcopy, but for now we are trying to just save
            # by reference.  This *could* have side effects as all components
            # will share config. In fact, I did find such a side effect in
            # _config_changed below. If a config attribute value was a mutable
            # type all instances of a component were getting the same copy,
            # effectively making that a class attribute.
            # self.config = deepcopy(config)
            self.config = config
        else:
            # allow _config_default to return something
            self._load_config(self.config)

        # Ensure explicit kwargs are applied after loading config.
        # This is usually redundant, but ensures config doesn't override
        # explicitly assigned values.
        for key, value in kwargs.items():
            setattr(self, key, value)

    #-------------------------------------------------------------------------
    # Static trait notifiations
    #-------------------------------------------------------------------------

    @classmethod
    def section_names(cls):
        """return section names as a list"""
        return [
            c.__name__ for c in reversed(cls.__mro__)
            if issubclass(c, Configurable) and issubclass(cls, c)
        ]

    def _find_my_config(self, cfg):
        """extract my config from a global Config object

        will construct a Config object of only the config values that apply
        to me based on my mro(), as well as those of my parent(s) if they
        exist.

        If I am Bar and my parent is Foo, and their parent is Tim, this will
        return merge following config sections, in this order::

            [Bar, Foo.bar, Tim.Foo.Bar]

        With the last item being the highest priority.
        """
        cfgs = [cfg]
        if self.parent:
            cfgs.append(self.parent._find_my_config(cfg))
        my_config = Config()
        for c in cfgs:
            for sname in self.section_names():
                # Don't do a blind getattr as that would cause the config to
                # dynamically create the section with name Class.__name__.
                if c._has_section(sname):
                    my_config.merge(c[sname])
        return my_config

    def _load_config(self, cfg, section_names=None, traits=None):
        """load traits from a Config object"""

        if traits is None:
            traits = self.traits(config=True)
        if section_names is None:
            section_names = self.section_names()

        my_config = self._find_my_config(cfg)

        # hold trait notifications until after all config has been loaded
        with self.hold_trait_notifications():
            for name, config_value in my_config.items():
                if name in traits:
                    if isinstance(config_value, LazyConfigValue):
                        # ConfigValue is a wrapper for using append / update
                        # on containers without having to copy the initial
                        # value
                        initial = getattr(self, name)
                        config_value = config_value.get_value(initial)
                    # We have to do a deepcopy here if we don't deepcopy the
                    # entire config object. If we don't, a mutable
                    # config_value will be shared by all instances,
                    # effectively making it a class attribute.
                    setattr(self, name, deepcopy(config_value))
                elif not _is_section_key(name) and not isinstance(
                        config_value, Config):
                    from difflib import get_close_matches
                    if isinstance(self, LoggingConfigurable):
                        warn = self.log.warning
                    else:
                        warn = lambda msg: warnings.warn(msg, stacklevel=9)
                    matches = get_close_matches(name, traits)
                    msg = u"Config option `{option}` not recognized by `{klass}`.".format(
                        option=name, klass=self.__class__.__name__)

                    if len(matches) == 1:
                        msg += u"  Did you mean `{matches}`?".format(
                            matches=matches[0])
                    elif len(matches) >= 1:
                        msg += "  Did you mean one of: `{matches}`?".format(
                            matches=', '.join(sorted(matches)))
                    warn(msg)

    @observe('config')
    @observe_compat
    def _config_changed(self, change):
        """Update all the class traits having ``config=True`` in metadata.

        For any class trait with a ``config`` metadata attribute that is
        ``True``, we update the trait with the value of the corresponding
        config entry.
        """
        # Get all traits with a config metadata entry that is True
        traits = self.traits(config=True)

        # We auto-load config section for this class as well as any parent
        # classes that are Configurable subclasses.  This starts with
        # Configurable and works down the mro loading the config for each
        # section.
        section_names = self.section_names()
        self._load_config(change.new,
                          traits=traits,
                          section_names=section_names)

    def update_config(self, config):
        """Update config and load the new values"""
        # load config
        self._load_config(config)
        # merge it into self.config
        self.config.merge(config)
        # TODO: trigger change event if/when dict-update change events take place
        # DO NOT trigger full trait-change

    @classmethod
    def class_get_help(cls, inst=None):
        """Get the help string for this class in ReST format.

        If `inst` is given, it's current trait values will be used in place of
        class defaults.
        """
        assert inst is None or isinstance(inst, cls)
        final_help = []
        final_help.append(u'%s options' % cls.__name__)
        final_help.append(len(final_help[0]) * u'-')
        for k, v in sorted(cls.class_traits(config=True).items()):
            help = cls.class_get_trait_help(v, inst)
            final_help.append(help)
        return '\n'.join(final_help)

    @classmethod
    def class_get_trait_help(cls, trait, inst=None, helptext=None):
        """Get the help string for a single trait.

        If `inst` is given, it's current trait values will be used in place
        of the class default.
        If `helptext` is given, it overrides the trait's own ``help`` text
        (added for compatibility with ``Application.emit_alias_help``, which
        passes alias-specific help).
        """
        assert inst is None or isinstance(inst, cls)
        lines = []
        header = "--%s.%s=<%s>" % (cls.__name__, trait.name,
                                   trait.__class__.__name__)
        lines.append(header)
        if inst is not None:
            lines.append(indent('Current: %r' % getattr(inst, trait.name), 4))
        else:
            try:
                dvr = trait.default_value_repr()
            except Exception:
                dvr = None  # ignore defaults we can't construct
            if dvr is not None:
                if len(dvr) > 64:
                    dvr = dvr[:61] + '...'
                lines.append(indent('Default: %s' % dvr, 4))
        if 'Enum' in trait.__class__.__name__:
            # include Enum choices
            lines.append(indent('Choices: %r' % (trait.values, )))

        if helptext is None:
            helptext = trait.help
        if helptext != '':
            helptext = '\n'.join(wrap_paragraphs(helptext, 76))
            lines.append(indent(helptext, 4))
        return '\n'.join(lines)

    @classmethod
    def class_print_help(cls, inst=None):
        """Get the help string for a single trait and print it."""
        print(cls.class_get_help(inst))

    @classmethod
    def class_config_section(cls, classes=None):
        """Get the config class config section

        Parameters
        ----------
        classes : list, optional
            The list of configurable classes being documented.  Accepted for
            compatibility with callers such as
            ``Application.generate_config_file`` which pass it; currently
            unused here.
        """
        def c(s):
            """return a commented, wrapped block."""
            s = '\n\n'.join(wrap_paragraphs(s, 78))

            return '# ' + s.replace('\n', '\n# ')

        # section header
        breaker = '#' + '-' * 78
        s = "# %s configuration" % cls.__name__
        lines = [breaker, s, breaker, '']
        # get the description trait
        desc = cls.class_traits().get('description')
        if desc:
            desc = desc.default_value
        else:
            # no description trait, use __doc__
            desc = getattr(cls, '__doc__', '')
        if desc:
            lines.append(c(desc))
            lines.append('')

        for name, trait in sorted(cls.class_own_traits(config=True).items()):
            lines.append(c(trait.help))
            lines.append('# c.%s.%s = %s' % (cls.__name__, name,
                                             trait.default_value_repr()))
            lines.append('')
        return '\n'.join(lines)

    @classmethod
    def class_config_rst_doc(cls):
        """Generate rST documentation for this class' config options.

        Excludes traits defined on parent classes.
        """
        lines = []
        classname = cls.__name__
        for k, trait in sorted(cls.class_own_traits(config=True).items()):
            ttype = trait.__class__.__name__

            termline = classname + '.' + trait.name

            # Choices or type
            if 'Enum' in ttype:
                # include Enum choices
                termline += ' : ' + '|'.join(repr(x) for x in trait.values)
            else:
                termline += ' : ' + ttype
            lines.append(termline)

            # Default value
            try:
                dvr = trait.default_value_repr()
            except Exception:
                dvr = None  # ignore defaults we can't construct
            if dvr is not None:
                if len(dvr) > 64:
                    dvr = dvr[:61] + '...'
                # Double up backslashes, so they get to the rendered docs
                dvr = dvr.replace('\\n', '\\\\n')
                lines.append('    Default: ``%s``' % dvr)
                lines.append('')

            help = trait.help or 'No description'
            lines.append(indent(dedent(help), 4))

            # Blank line
            lines.append('')

        return '\n'.join(lines)
class Application(SingletonConfigurable):
    """A singleton application with full configuration support.

    Fixes in this revision:

    - ``flatten_flags`` tested ``isinstance(aliases, tuple)`` (the result
      dict, never a tuple) instead of the loop variable ``alias``, so tuple
      alias keys were wrapped a second time and registered under a tuple key.
    - ``_log_format_changed`` emitted a warning string missing its ``f``
      prefix, so ``{self.log}`` was printed literally.
    """

    # The name of the application, will usually match the name of the command
    # line application
    name = Unicode('application')

    # The description of the application that is printed at the beginning
    # of the help.
    description = Unicode('This is an application.')
    # default section descriptions
    option_description = Unicode(option_description)
    keyvalue_description = Unicode(keyvalue_description)
    subcommand_description = Unicode(subcommand_description)

    python_config_loader_class = PyFileConfigLoader
    json_config_loader_class = JSONFileConfigLoader

    # The usage and example string that goes at the end of the help string.
    examples = Unicode()

    # A sequence of Configurable subclasses whose config=True attributes will
    # be exposed at the command line.
    classes = []

    def _classes_inc_parents(self, classes=None):
        """Iterate through configurable classes, including configurable parents

        :param classes:
            The list of classes to iterate; if not set, uses :attr:`classes`.

        Children should always be after parents, and each class should only
        be yielded once.
        """
        if classes is None:
            classes = self.classes

        seen = set()
        for c in classes:
            # We want to sort parents before children, so we reverse the MRO
            for parent in reversed(c.mro()):
                if issubclass(parent, Configurable) and (parent not in seen):
                    seen.add(parent)
                    yield parent

    # The version string of this application.
    version = Unicode('0.0')

    # the argv used to initialize the application
    argv = List()

    # Whether failing to load config files should prevent startup
    raise_config_file_errors = Bool(
        TRAITLETS_APPLICATION_RAISE_CONFIG_FILE_ERROR)

    # The log level for the application
    log_level = Enum(
        (0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'),
        default_value=logging.WARN,
        help="Set the log level by value or name.").tag(config=True)

    @observe('log_level')
    @observe_compat
    def _log_level_changed(self, change):
        """Adjust the log level when log_level is set."""
        new = change.new
        if isinstance(new, str):
            # Name was given; normalize to the numeric level and re-assign
            # so the trait always holds the int form.
            new = getattr(logging, new)
            self.log_level = new
        self.log.setLevel(new)

    _log_formatter_cls = LevelFormatter

    log_datefmt = Unicode(
        "%Y-%m-%d %H:%M:%S",
        help="The date format used by logging formatters for %(asctime)s"
    ).tag(config=True)

    log_format = Unicode(
        "[%(name)s]%(highlevel)s %(message)s",
        help="The Logging format template",
    ).tag(config=True)

    @observe('log_datefmt', 'log_format')
    @observe_compat
    def _log_format_changed(self, change):
        """Change the log formatter when log_format is set."""
        _log_handler = self._get_log_handler()
        if not _log_handler:
            # Fix: the original string was missing the f-prefix, so
            # "{self.log}" was emitted literally.
            warnings.warn(
                f"No Handler found on {self.log}, setting log_format will have no effect",
                RuntimeWarning,
            )
            return
        _log_formatter = self._log_formatter_cls(fmt=self.log_format,
                                                 datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)

    @default('log')
    def _log_default(self):
        """Start logging for this application.

        The default is to log to stderr using a StreamHandler, if no default
        handler already exists.  The log level starts at logging.WARN, but
        this can be adjusted by setting the ``log_level`` attribute.
        """
        log = logging.getLogger(self.__class__.__name__)
        log.setLevel(self.log_level)
        log.propagate = False
        _log = log  # copied from Logger.hasHandlers() (new in Python 3.2)
        while _log:
            if _log.handlers:
                return log
            if not _log.propagate:
                break
            else:
                _log = _log.parent
        if sys.executable and sys.executable.endswith('pythonw.exe'):
            # this should really go to a file, but file-logging is only
            # hooked up in parallel applications
            _log_handler = logging.StreamHandler(open(os.devnull, 'w'))
        else:
            _log_handler = logging.StreamHandler()
        _log_formatter = self._log_formatter_cls(fmt=self.log_format,
                                                 datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)
        log.addHandler(_log_handler)
        return log

    #: the alias map for configurables
    #: Keys might be strings or tuples for additional options; single-letter
    #: alias accessed like `-v`.
    #: Values might be like "Class.trait" strings or two-tuples:
    #: (Class.trait, help-text).
    aliases = {'log-level': 'Application.log_level'}

    # flags for loading Configurables or store_const style flags
    # flags are loaded from this dict by '--key' flags
    # this must be a dict of two-tuples, the first element being the
    # Config/dict and the second being the help string for the flag
    flags = {
        'debug': ({
            'Application': {
                'log_level': logging.DEBUG,
            },
        }, "Set log-level to debug, for the most verbose logging."),
        'show-config': ({
            'Application': {
                'show_config': True,
            },
        }, "Show the application's configuration (human-readable format)"),
        'show-config-json': ({
            'Application': {
                'show_config_json': True,
            },
        }, "Show the application's configuration (json format)"),
    }

    # subcommands for launching other applications
    # if this is not empty, this will be a parent Application
    # this must be a dict of two-tuples,
    # the first element being the application class/import string
    # and the second being the help string for the subcommand
    subcommands = Dict()
    # parse_command_line will initialize a subapp, if requested
    subapp = Instance('traitlets.config.application.Application',
                      allow_none=True)

    # extra command-line arguments that don't set config values
    extra_args = List(Unicode())

    cli_config = Instance(
        Config, (), {},
        help="""The subset of our configuration that came from the command-line

        We re-load this configuration after loading config files,
        to ensure that it maintains highest priority.
        """)

    _loaded_config_files = List()

    show_config = Bool(
        help="Instead of starting the Application, dump configuration to stdout"
    ).tag(config=True)

    show_config_json = Bool(
        help=
        "Instead of starting the Application, dump configuration to stdout (as JSON)"
    ).tag(config=True)

    @observe('show_config_json')
    def _show_config_json_changed(self, change):
        self.show_config = change.new

    @observe('show_config')
    def _show_config_changed(self, change):
        if change.new:
            # Swap the start method so starting the app prints config instead.
            self._save_start = self.start
            self.start = self.start_show_config

    def __init__(self, **kwargs):
        SingletonConfigurable.__init__(self, **kwargs)
        # Ensure my class is in self.classes, so my attributes appear in
        # command line options and config files.
        cls = self.__class__
        if cls not in self.classes:
            if self.classes is cls.classes:
                # class attr, assign instead of insert
                self.classes = [cls] + self.classes
            else:
                self.classes.insert(0, self.__class__)

    @observe('config')
    @observe_compat
    def _config_changed(self, change):
        super(Application, self)._config_changed(change)
        self.log.debug('Config changed: %r', change.new)

    @catch_config_error
    def initialize(self, argv=None):
        """Do the basic steps to configure me.

        Override in subclasses.
        """
        self.parse_command_line(argv)

    def start(self):
        """Start the app mainloop.

        Override in subclasses.
        """
        if self.subapp is not None:
            return self.subapp.start()

    def start_show_config(self):
        """start function used when show_config is True"""
        config = self.config.copy()
        # exclude show_config flags from displayed config
        for cls in self.__class__.mro():
            if cls.__name__ in config:
                cls_config = config[cls.__name__]
                cls_config.pop('show_config', None)
                cls_config.pop('show_config_json', None)

        if self.show_config_json:
            json.dump(config,
                      sys.stdout,
                      indent=1,
                      sort_keys=True,
                      default=repr)
            # add trailing newline
            sys.stdout.write('\n')
            return

        if self._loaded_config_files:
            print("Loaded config files:")
            for f in self._loaded_config_files:
                print('  ' + f)
            print()

        for classname in sorted(config):
            class_config = config[classname]
            if not class_config:
                continue
            print(classname)
            pformat_kwargs = dict(indent=4, compact=True)
            for traitname in sorted(class_config):
                value = class_config[traitname]
                print('  .{} = {}'.format(
                    traitname,
                    pprint.pformat(value, **pformat_kwargs),
                ))

    def print_alias_help(self):
        """Print the alias parts of the help."""
        print('\n'.join(self.emit_alias_help()))

    def emit_alias_help(self):
        """Yield the lines for alias part of the help."""
        if not self.aliases:
            return

        classdict = {}
        for cls in self.classes:
            # include all parents (up to, but excluding Configurable) in
            # available names
            for c in cls.mro()[:-3]:
                classdict[c.__name__] = c

        for alias, longname in self.aliases.items():
            try:
                if isinstance(longname, tuple):
                    longname, fhelp = longname
                else:
                    fhelp = None
                classname, traitname = longname.split('.', 1)
                cls = classdict[classname]

                trait = cls.class_traits(config=True)[traitname]
                fhelp = cls.class_get_trait_help(
                    trait, helptext=fhelp).splitlines()

                if not isinstance(alias, tuple):
                    alias = (alias, )
                alias = sorted(alias, key=len)
                alias = ', '.join(('--%s' if len(m) > 1 else '-%s') % m
                                  for m in alias)

                # reformat first line
                fhelp[0] = fhelp[0].replace('--' + longname, alias)
                for l in fhelp:
                    yield l
                yield indent("Equivalent to: [--%s]" % longname)
            except Exception as ex:
                self.log.error(
                    'Failed collecting help-message for alias %r, due to: %s',
                    alias, ex)
                raise

    def print_flag_help(self):
        """Print the flag part of the help."""
        print('\n'.join(self.emit_flag_help()))

    def emit_flag_help(self):
        """Yield the lines for the flag part of the help."""
        if not self.flags:
            return

        for flags, (cfg, fhelp) in self.flags.items():
            try:
                if not isinstance(flags, tuple):
                    flags = (flags, )
                flags = sorted(flags, key=len)
                flags = ', '.join(('--%s' if len(m) > 1 else '-%s') % m
                                  for m in flags)
                yield flags
                yield indent(dedent(fhelp.strip()))
                cfg_list = ' '.join('--%s.%s=%s' % (clname, prop, val)
                                    for clname, props_dict in cfg.items()
                                    for prop, val in props_dict.items())
                cfg_txt = "Equivalent to: [%s]" % cfg_list
                yield indent(dedent(cfg_txt))
            except Exception as ex:
                self.log.error(
                    'Failed collecting help-message for flag %r, due to: %s',
                    flags, ex)
                raise

    def print_options(self):
        """Print the options part of the help."""
        print('\n'.join(self.emit_options_help()))

    def emit_options_help(self):
        """Yield the lines for the options part of the help."""
        if not self.flags and not self.aliases:
            return
        header = 'Options'
        yield header
        yield '=' * len(header)
        for p in wrap_paragraphs(self.option_description):
            yield p
            yield ''

        for l in self.emit_flag_help():
            yield l
        for l in self.emit_alias_help():
            yield l
        yield ''

    def print_subcommands(self):
        """Print the subcommand part of the help."""
        print('\n'.join(self.emit_subcommands_help()))

    def emit_subcommands_help(self):
        """Yield the lines for the subcommand part of the help."""
        if not self.subcommands:
            return

        header = "Subcommands"
        yield header
        yield '=' * len(header)
        for p in wrap_paragraphs(
                self.subcommand_description.format(app=self.name)):
            yield p
            yield ''
        for subc, (cls, help) in self.subcommands.items():
            yield subc
            if help:
                yield indent(dedent(help.strip()))
        yield ''

    def emit_help_epilogue(self, classes):
        """Yield the very bottom lines of the help message.

        If classes=False (the default), print `--help-all` msg.
        """
        if not classes:
            yield "To see all available configurables, use `--help-all`."
            yield ''

    def print_help(self, classes=False):
        """Print the help for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        print('\n'.join(self.emit_help(classes=classes)))

    def emit_help(self, classes=False):
        """Yield the help-lines for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        for l in self.emit_description():
            yield l
        for l in self.emit_subcommands_help():
            yield l
        for l in self.emit_options_help():
            yield l

        if classes:
            help_classes = self._classes_with_config_traits()
            if help_classes:
                yield "Class options"
                yield "============="
                for p in wrap_paragraphs(self.keyvalue_description):
                    yield p
                    yield ''

            for cls in help_classes:
                yield cls.class_get_help()
                yield ''
        for l in self.emit_examples():
            yield l

        for l in self.emit_help_epilogue(classes):
            yield l

    def document_config_options(self):
        """Generate rST format documentation for the config options this application

        Returns a multiline string.
        """
        return '\n'.join(c.class_config_rst_doc()
                         for c in self._classes_inc_parents())

    def print_description(self):
        """Print the application description."""
        print('\n'.join(self.emit_description()))

    def emit_description(self):
        """Yield lines with the application description."""
        for p in wrap_paragraphs(self.description or self.__doc__):
            yield p
            yield ''

    def print_examples(self):
        """Print usage and examples (see `emit_examples()`)."""
        print('\n'.join(self.emit_examples()))

    def emit_examples(self):
        """Yield lines with the usage and examples.

        This usage string goes at the end of the command line help string
        and should contain examples of the application's usage.
        """
        if self.examples:
            yield "Examples"
            yield "--------"
            yield ''
            yield indent(dedent(self.examples.strip()))
            yield ''

    def print_version(self):
        """Print the version string."""
        print(self.version)

    @catch_config_error
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, _ = self.subcommands.get(subc)

        if isinstance(subapp, str):
            subapp = import_item(subapp)

        ## Cannot issubclass() on a non-type (SOhttp://stackoverflow.com/questions/8692430)
        if isinstance(subapp, type) and issubclass(subapp, Application):
            # Clear existing instances before...
            self.__class__.clear_instance()
            # instantiating subapp...
            self.subapp = subapp.instance(parent=self)
        elif callable(subapp):
            # or ask factory to create it...
            self.subapp = subapp(self)
        else:
            raise AssertionError("Invalid mappings for subcommand '%s'!" %
                                 subc)

        # ... and finally initialize subapp.
        self.subapp.initialize(argv)

    def flatten_flags(self):
        """Flatten flags and aliases for loaders, so cl-args override as expected.

        This prevents issues such as an alias pointing to InteractiveShell,
        but a config file setting the same trait in TerminalInteraciveShell
        getting inappropriate priority over the command-line arg.
        Also, loaders expect ``(key: longname)`` and not ````key: (longname,
        help)`` items.

        Only aliases with exactly one descendent in the class list
        will be promoted.
        """
        # build a tree of classes in our list that inherit from a particular
        # it will be a dict by parent classname of classes in our list
        # that are descendents
        mro_tree = defaultdict(list)
        for cls in self.classes:
            clsname = cls.__name__
            for parent in cls.mro()[1:-3]:
                # exclude cls itself and Configurable,HasTraits,object
                mro_tree[parent.__name__].append(clsname)
        # flatten aliases, which have the form:
        # { 'alias' : 'Class.trait' }
        aliases = {}
        for alias, longname in self.aliases.items():
            if isinstance(longname, tuple):
                longname, _ = longname
            cls, trait = longname.split('.', 1)
            children = mro_tree[cls]
            if len(children) == 1:
                # exactly one descendent, promote alias
                cls = children[0]
            # Fix: this previously tested ``isinstance(aliases, tuple)``
            # (the result dict, never a tuple), so a tuple alias key was
            # wrapped again and registered under a tuple instead of each
            # of its member strings.
            if not isinstance(alias, tuple):
                alias = (alias, )
            for al in alias:
                aliases[al] = '.'.join([cls, trait])

        # flatten flags, which are of the form:
        # { 'key' : ({'Cls' : {'trait' : value}}, 'help')}
        flags = {}
        for key, (flagdict, help) in self.flags.items():
            newflag = {}
            for cls, subdict in flagdict.items():
                children = mro_tree[cls]
                # exactly one descendent, promote flag section
                if len(children) == 1:
                    cls = children[0]

                if cls in newflag:
                    newflag[cls].update(subdict)
                else:
                    newflag[cls] = subdict

            if not isinstance(key, tuple):
                key = (key, )
            for k in key:
                flags[k] = (newflag, help)
        return flags, aliases

    def _create_loader(self, argv, aliases, flags, classes):
        return KVArgParseConfigLoader(argv,
                                      aliases,
                                      flags,
                                      classes=classes,
                                      log=self.log)

    @catch_config_error
    def parse_command_line(self, argv=None):
        """Parse the command line arguments."""
        assert not isinstance(argv, str)
        argv = sys.argv[1:] if argv is None else argv
        self.argv = [cast_unicode(arg) for arg in argv]

        if argv and argv[0] == 'help':
            # turn `ipython help notebook` into `ipython notebook -h`
            argv = argv[1:] + ['-h']

        if self.subcommands and len(argv) > 0:
            # we have subcommands, and one may have been specified
            subc, subargv = argv[0], argv[1:]
            if re.match(r'^\w(\-?\w)*$', subc) and subc in self.subcommands:
                # it's a subcommand, and *not* a flag or class parameter
                return self.initialize_subcommand(subc, subargv)

        # Arguments after a '--' argument are for the script IPython may be
        # about to run, not IPython itself. For arguments parsed here (help
        # and version), we want to only search the arguments up to the first
        # occurrence of '--', which we're calling interpreted_argv.
        try:
            interpreted_argv = argv[:argv.index('--')]
        except ValueError:
            interpreted_argv = argv

        if any(x in interpreted_argv for x in ('-h', '--help-all', '--help')):
            self.print_help('--help-all' in interpreted_argv)
            self.exit(0)

        if '--version' in interpreted_argv or '-V' in interpreted_argv:
            self.print_version()
            self.exit(0)

        # flatten flags&aliases, so cl-args get appropriate priority:
        flags, aliases = self.flatten_flags()
        classes = tuple(self._classes_with_config_traits())
        loader = self._create_loader(argv, aliases, flags, classes=classes)
        try:
            self.cli_config = deepcopy(loader.load_config())
        except SystemExit:
            # traitlets 5: no longer print help output on error
            # help output is huge, and comes after the error
            raise
        self.update_config(self.cli_config)
        # store unparsed args in extra_args
        self.extra_args = loader.extra_args

    @classmethod
    def _load_config_files(cls,
                           basefilename,
                           path=None,
                           log=None,
                           raise_config_file_errors=False):
        """Load config files (py,json) by filename and path.

        yield each config object in turn.
        """

        if not isinstance(path, list):
            path = [path]
        for path in path[::-1]:
            # path list is in descending priority order, so load files
            # backwards:
            pyloader = cls.python_config_loader_class(basefilename + '.py',
                                                      path=path,
                                                      log=log)
            if log:
                log.debug("Looking for %s in %s", basefilename, path
                          or os.getcwd())
            jsonloader = cls.json_config_loader_class(basefilename + '.json',
                                                      path=path,
                                                      log=log)
            loaded = []
            filenames = []
            for loader in [pyloader, jsonloader]:
                config = None
                try:
                    config = loader.load_config()
                except ConfigFileNotFound:
                    pass
                except Exception:
                    # try to get the full filename, but it will be empty in
                    # the unlikely event that the error raised before
                    # filefind finished
                    filename = loader.full_filename or basefilename
                    # problem while running the file
                    if raise_config_file_errors:
                        raise
                    if log:
                        log.error("Exception while loading config file %s",
                                  filename,
                                  exc_info=True)
                else:
                    if log:
                        log.debug("Loaded config file: %s",
                                  loader.full_filename)
                if config:
                    for filename, earlier_config in zip(filenames, loaded):
                        collisions = earlier_config.collisions(config)
                        if collisions and log:
                            log.warning(
                                "Collisions detected in {0} and {1} config files."
                                " {1} has higher priority: {2}".format(
                                    filename,
                                    loader.full_filename,
                                    json.dumps(collisions, indent=2),
                                ))
                    yield (config, loader.full_filename)
                    loaded.append(config)
                    filenames.append(loader.full_filename)

    @property
    def loaded_config_files(self):
        """Currently loaded configuration files"""
        return self._loaded_config_files[:]

    @catch_config_error
    def load_config_file(self, filename, path=None):
        """Load config files by filename and path."""
        filename, ext = os.path.splitext(filename)
        new_config = Config()
        for (config, filename) in self._load_config_files(
                filename,
                path=path,
                log=self.log,
                raise_config_file_errors=self.raise_config_file_errors,
        ):
            new_config.merge(config)
            if filename not in self._loaded_config_files:
                # only add to list of loaded files if not previously loaded
                self._loaded_config_files.append(filename)
        # add self.cli_config to preserve CLI config priority
        new_config.merge(self.cli_config)
        self.update_config(new_config)

    def _classes_with_config_traits(self, classes=None):
        """
        Yields only classes with configurable traits, and their subclasses.

        :param classes:
            The list of classes to iterate; if not set, uses :attr:`classes`.

        Thus, produced sample config-file will contain all classes
        on which a trait-value may be overridden:

        - either on the class owning the trait,
        - or on its subclasses, even if those subclasses do not define
          any traits themselves.
        """
        if classes is None:
            classes = self.classes

        cls_to_config = OrderedDict(
            (cls, bool(cls.class_own_traits(config=True)))
            for cls in self._classes_inc_parents(classes))

        def is_any_parent_included(cls):
            return any(b in cls_to_config and cls_to_config[b]
                       for b in cls.__bases__)

        ## Mark "empty" classes for inclusion if their parents own-traits,
        #  and loop until no more classes gets marked.
        #
        while True:
            to_incl_orig = cls_to_config.copy()
            cls_to_config = OrderedDict(
                (cls, inc_yes or is_any_parent_included(cls))
                for cls, inc_yes in cls_to_config.items())
            if cls_to_config == to_incl_orig:
                break

        for cl, inc_yes in cls_to_config.items():
            if inc_yes:
                yield cl

    def generate_config_file(self, classes=None):
        """generate default config file from Configurables"""
        lines = ["# Configuration file for %s." % self.name]
        lines.append('')
        classes = self.classes if classes is None else classes
        config_classes = list(self._classes_with_config_traits(classes))
        for cls in config_classes:
            lines.append(cls.class_config_section(config_classes))
        return '\n'.join(lines)

    def exit(self, exit_status=0):
        self.log.debug("Exiting application: %s" % self.name)
        sys.exit(exit_status)

    @classmethod
    def launch_instance(cls, argv=None, **kwargs):
        """Launch a global instance of this Application

        If a global instance already exists, this reinitializes and starts it
        """
        app = cls.instance(**kwargs)
        app.initialize(argv)
        app.start()
class Application(SingletonConfigurable):
    """A singleton application with full configuration support."""

    # The name of the application, will usually match the name of the command
    # line application
    name = Unicode(u'application')

    # The description of the application that is printed at the beginning
    # of the help.
    description = Unicode(u'This is an application.')

    # default section descriptions
    option_description = Unicode(option_description)
    keyvalue_description = Unicode(keyvalue_description)
    subcommand_description = Unicode(subcommand_description)

    # Loader classes used by _load_config_files; subclasses may override.
    python_config_loader_class = PyFileConfigLoader
    json_config_loader_class = JSONFileConfigLoader

    # The usage and example string that goes at the end of the help string.
    examples = Unicode()

    # A sequence of Configurable subclasses whose config=True attributes will
    # be exposed at the command line.
    classes = []

    @property
    def _help_classes(self):
        """Define `App.help_classes` if CLI classes should differ from config file classes"""
        return getattr(self, 'help_classes', self.classes)

    @property
    def _config_classes(self):
        """Define `App.config_classes` if config file classes should differ from CLI classes."""
        return getattr(self, 'config_classes', self.classes)

    # The version string of this application.
    version = Unicode(u'0.0')

    # the argv used to initialize the application
    argv = List()

    # The log level for the application
    log_level = Enum((0,10,20,30,40,50,'DEBUG','INFO','WARN','ERROR','CRITICAL'),
                    default_value=logging.WARN,
                    config=True,
                    help="Set the log level by value or name.")

    def _log_level_changed(self, name, old, new):
        """Adjust the log level when log_level is set."""
        if isinstance(new, string_types):
            # Translate a level name ('DEBUG', ...) to its numeric value and
            # store it back, so log_level is always numeric afterwards.
            new = getattr(logging, new)
            self.log_level = new
        self.log.setLevel(new)

    _log_formatter_cls = LevelFormatter

    log_datefmt = Unicode("%Y-%m-%d %H:%M:%S", config=True,
        help="The date format used by logging formatters for %(asctime)s"
    )

    def _log_datefmt_changed(self, name, old, new):
        # Rebuilding the formatter via the format handler picks up the new datefmt.
        self._log_format_changed('log_format', self.log_format, self.log_format)

    log_format = Unicode("[%(name)s]%(highlevel)s %(message)s", config=True,
        help="The Logging format template",
    )

    def _log_format_changed(self, name, old, new):
        """Change the log formatter when log_format is set."""
        _log_handler = self.log.handlers[0]
        _log_formatter = self._log_formatter_cls(fmt=new, datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)

    log = Instance(logging.Logger)

    def _log_default(self):
        """Start logging for this application.

        The default is to log to stderr using a StreamHandler, if no default
        handler already exists.  The log level starts at logging.WARN, but this
        can be adjusted by setting the ``log_level`` attribute.
        """
        log = logging.getLogger(self.__class__.__name__)
        log.setLevel(self.log_level)
        log.propagate = False
        _log = log
        # copied from Logger.hasHandlers() (new in Python 3.2)
        while _log:
            if _log.handlers:
                return log
            if not _log.propagate:
                break
            else:
                _log = _log.parent
        if sys.executable.endswith('pythonw.exe'):
            # this should really go to a file, but file-logging is only
            # hooked up in parallel applications
            _log_handler = logging.StreamHandler(open(os.devnull, 'w'))
        else:
            _log_handler = logging.StreamHandler()
        _log_formatter = self._log_formatter_cls(fmt=self.log_format, datefmt=self.log_datefmt)
        _log_handler.setFormatter(_log_formatter)
        log.addHandler(_log_handler)
        return log

    # the alias map for configurables
    aliases = Dict({'log-level' : 'Application.log_level'})

    # flags for loading Configurables or store_const style flags
    # flags are loaded from this dict by '--key' flags
    # this must be a dict of two-tuples, the first element being the Config/dict
    # and the second being the help string for the flag
    flags = Dict()

    def _flags_changed(self, name, old, new):
        """ensure flags dict is valid"""
        for key, value in iteritems(new):
            assert len(value) == 2, "Bad flag: %r:%s" % (key, value)
            assert isinstance(value[0], (dict, Config)), "Bad flag: %r:%s" % (key, value)
            assert isinstance(value[1], string_types), "Bad flag: %r:%s" % (key, value)

    # subcommands for launching other applications
    # if this is not empty, this will be a parent Application
    # this must be a dict of two-tuples,
    # the first element being the application class/import string
    # and the second being the help string for the subcommand
    subcommands = Dict()
    # parse_command_line will initialize a subapp, if requested
    subapp = Instance('traitlets.config.application.Application', allow_none=True)

    # extra command-line arguments that don't set config values
    extra_args = List(Unicode)

    def __init__(self, **kwargs):
        SingletonConfigurable.__init__(self, **kwargs)
        # Ensure my class is in self.classes, so my attributes appear in command line
        # options and config files.
        if self.__class__ not in self.classes:
            self.classes.insert(0, self.__class__)

    def _config_changed(self, name, old, new):
        # Extend the parent hook with debug logging of the new config.
        SingletonConfigurable._config_changed(self, name, old, new)
        self.log.debug('Config changed:')
        self.log.debug(repr(new))

    @catch_config_error
    def initialize(self, argv=None):
        """Do the basic steps to configure me.

        Override in subclasses.
        """
        self.parse_command_line(argv)

    def start(self):
        """Start the app mainloop.

        Override in subclasses.
        """
        if self.subapp is not None:
            return self.subapp.start()

    def print_alias_help(self):
        """Print the alias part of the help."""
        if not self.aliases:
            return

        lines = []
        classdict = {}
        for cls in self._help_classes:
            # include all parents (up to, but excluding Configurable) in available names
            for c in cls.mro()[:-3]:
                classdict[c.__name__] = c

        for alias, longname in iteritems(self.aliases):
            classname, traitname = longname.split('.', 1)
            cls = classdict[classname]

            trait = cls.class_traits(config=True)[traitname]
            help = cls.class_get_trait_help(trait).splitlines()
            # reformat first line
            help[0] = help[0].replace(longname, alias) + ' (%s)' % longname
            if len(alias) == 1:
                # single-char aliases use the short '-x ' form
                help[0] = help[0].replace('--%s=' % alias, '-%s ' % alias)
            lines.extend(help)
        # lines.append('')
        print(os.linesep.join(lines))

    def print_flag_help(self):
        """Print the flag part of the help."""
        if not self.flags:
            return

        lines = []
        for m, (cfg, help) in iteritems(self.flags):
            prefix = '--' if len(m) > 1 else '-'
            lines.append(prefix + m)
            lines.append(indent(dedent(help.strip())))
        # lines.append('')
        print(os.linesep.join(lines))

    def print_options(self):
        """Print the 'Options' section: description, flags, then aliases."""
        if not self.flags and not self.aliases:
            return
        lines = ['Options']
        lines.append('-' * len(lines[0]))
        lines.append('')
        for p in wrap_paragraphs(self.option_description):
            lines.append(p)
            lines.append('')
        print(os.linesep.join(lines))
        self.print_flag_help()
        self.print_alias_help()
        print()

    def print_subcommands(self):
        """Print the subcommand part of the help."""
        if not self.subcommands:
            return

        lines = ["Subcommands"]
        lines.append('-' * len(lines[0]))
        lines.append('')
        for p in wrap_paragraphs(self.subcommand_description.format(
                    app=self.name)):
            lines.append(p)
            lines.append('')
        for subc, (cls, help) in iteritems(self.subcommands):
            lines.append(subc)
            if help:
                lines.append(indent(dedent(help.strip())))
        lines.append('')
        print(os.linesep.join(lines))

    def print_help(self, classes=False):
        """Print the help for each Configurable class in self.classes.

        If classes=False (the default), only flags and aliases are printed.
        """
        self.print_description()
        self.print_subcommands()
        self.print_options()

        if classes:
            help_classes = self._help_classes
            if help_classes:
                print("Class parameters")
                print("----------------")
                print()
                for p in wrap_paragraphs(self.keyvalue_description):
                    print(p)
                    print()

            for cls in help_classes:
                cls.class_print_help()
                print()
        else:
            print("To see all available configurables, use `--help-all`")
            print()

        self.print_examples()

    def print_description(self):
        """Print the application description."""
        for p in wrap_paragraphs(self.description):
            print(p)
            print()

    def print_examples(self):
        """Print usage and examples.

        This usage string goes at the end of the command line help string
        and should contain examples of the application's usage.
        """
        if self.examples:
            print("Examples")
            print("--------")
            print()
            print(indent(dedent(self.examples.strip())))
            print()

    def print_version(self):
        """Print the version string."""
        print(self.version)

    def update_config(self, config):
        """Fire the traits events when the config is updated."""
        # Save a copy of the current config.
        newconfig = deepcopy(self.config)
        # Merge the new config into the current one.
        newconfig.merge(config)
        # Save the combined config as self.config, which triggers the traits
        # events.
        self.config = newconfig

    @catch_config_error
    def initialize_subcommand(self, subc, argv=None):
        """Initialize a subcommand with argv."""
        subapp, help = self.subcommands.get(subc)

        if isinstance(subapp, string_types):
            # subcommand given as an import string, e.g. 'pkg.mod.App'
            subapp = import_item(subapp)

        # clear existing instances
        self.__class__.clear_instance()
        # instantiate
        self.subapp = subapp.instance(config=self.config)
        # and initialize subapp
        self.subapp.initialize(argv)

    def flatten_flags(self):
        """flatten flags and aliases, so cl-args override as expected.

        This prevents issues such as an alias pointing to InteractiveShell,
        but a config file setting the same trait in TerminalInteraciveShell
        getting inappropriate priority over the command-line arg.

        Only aliases with exactly one descendent in the class list
        will be promoted.
        """
        # build a tree of classes in our list that inherit from a particular
        # it will be a dict by parent classname of classes in our list
        # that are descendents
        mro_tree = defaultdict(list)
        for cls in self._help_classes:
            clsname = cls.__name__
            for parent in cls.mro()[1:-3]:
                # exclude cls itself and Configurable,HasTraits,object
                mro_tree[parent.__name__].append(clsname)
        # flatten aliases, which have the form:
        # { 'alias' : 'Class.trait' }
        aliases = {}
        for alias, cls_trait in iteritems(self.aliases):
            cls, trait = cls_trait.split('.', 1)
            children = mro_tree[cls]
            if len(children) == 1:
                # exactly one descendent, promote alias
                cls = children[0]
            aliases[alias] = '.'.join([cls, trait])

        # flatten flags, which are of the form:
        # { 'key' : ({'Cls' : {'trait' : value}}, 'help')}
        flags = {}
        for key, (flagdict, help) in iteritems(self.flags):
            newflag = {}
            for cls, subdict in iteritems(flagdict):
                children = mro_tree[cls]
                # exactly one descendent, promote flag section
                if len(children) == 1:
                    cls = children[0]
                newflag[cls] = subdict
            flags[key] = (newflag, help)
        return flags, aliases

    @catch_config_error
    def parse_command_line(self, argv=None):
        """Parse the command line arguments."""
        argv = sys.argv[1:] if argv is None else argv
        self.argv = [py3compat.cast_unicode(arg) for arg in argv]

        if argv and argv[0] == 'help':
            # turn `ipython help notebook` into `ipython notebook -h`
            argv = argv[1:] + ['-h']

        if self.subcommands and len(argv) > 0:
            # we have subcommands, and one may have been specified
            subc, subargv = argv[0], argv[1:]
            if re.match(r'^\w(\-?\w)*$', subc) and subc in self.subcommands:
                # it's a subcommand, and *not* a flag or class parameter
                return self.initialize_subcommand(subc, subargv)

        # Arguments after a '--' argument are for the script IPython may be
        # about to run, not IPython iteslf. For arguments parsed here (help and
        # version), we want to only search the arguments up to the first
        # occurrence of '--', which we're calling interpreted_argv.
        try:
            interpreted_argv = argv[:argv.index('--')]
        except ValueError:
            interpreted_argv = argv

        if any(x in interpreted_argv for x in ('-h', '--help-all', '--help')):
            self.print_help('--help-all' in interpreted_argv)
            self.exit(0)

        if '--version' in interpreted_argv or '-V' in interpreted_argv:
            self.print_version()
            self.exit(0)

        # flatten flags&aliases, so cl-args get appropriate priority:
        flags, aliases = self.flatten_flags()
        loader = KVArgParseConfigLoader(argv=argv, aliases=aliases,
                                        flags=flags, log=self.log)
        config = loader.load_config()
        self.update_config(config)
        # store unparsed args in extra_args
        self.extra_args = loader.extra_args

    @classmethod
    def _load_config_files(cls, basefilename, path=None, log=None):
        """Load config files (py,json) by filename and path.

        yield each config object in turn.
        """

        if not isinstance(path, list):
            path = [path]
        for path in path[::-1]:
            # path list is in descending priority order, so load files backwards:
            pyloader = cls.python_config_loader_class(basefilename + '.py', path=path, log=log)
            jsonloader = cls.json_config_loader_class(basefilename + '.json', path=path, log=log)
            config = None
            for loader in [pyloader, jsonloader]:
                try:
                    config = loader.load_config()
                except ConfigFileNotFound:
                    pass
                except Exception:
                    # try to get the full filename, but it will be empty in the
                    # unlikely event that the error raised before filefind finished
                    filename = loader.full_filename or basefilename
                    # problem while running the file
                    if log:
                        log.error("Exception while loading config file %s",
                                  filename, exc_info=True)
                else:
                    if log:
                        log.debug("Loaded config file: %s", loader.full_filename)
            if config:
                yield config

        # FIX: this was `raise StopIteration`.  Under PEP 479 (default since
        # Python 3.7) raising StopIteration inside a generator surfaces as a
        # RuntimeError at the call site instead of ending iteration.  A bare
        # `return` is the correct (and py2/py3-compatible) way to finish.
        return

    @catch_config_error
    def load_config_file(self, filename, path=None):
        """Load config files by filename and path."""
        filename, ext = os.path.splitext(filename)
        loaded = []
        for config in self._load_config_files(filename, path=path, log=self.log):
            loaded.append(config)
            self.update_config(config)
        if len(loaded) > 1:
            collisions = loaded[0].collisions(loaded[1])
            if collisions:
                self.log.warn("Collisions detected in {0}.py and {0}.json config files."
                              " {0}.json has higher priority: {1}".format(
                              filename, json.dumps(collisions, indent=2),
                ))

    def generate_config_file(self):
        """generate default config file from Configurables"""
        lines = ["# Configuration file for %s." % self.name]
        lines.append('')
        for cls in self._config_classes:
            lines.append(cls.class_config_section())
        return '\n'.join(lines)

    def exit(self, exit_status=0):
        """Log and terminate the process with ``exit_status``."""
        self.log.debug("Exiting application: %s" % self.name)
        sys.exit(exit_status)

    @classmethod
    def launch_instance(cls, argv=None, **kwargs):
        """Launch a global instance of this Application

        If a global instance already exists, this reinitializes and starts it
        """
        app = cls.instance(**kwargs)
        app.initialize(argv)
        app.start()
class BodyType(Body):
    """Body parameters drawn per body type.

    Selecting a ``body_type`` copies that type's values from ``body_types``
    onto this instance; derived quantities (radii, mass, velocity, rotational
    inertia) are then filled in lazily by the trait observers below.  Each
    observer only assigns a value while the target is still unset (== 0), so
    values are effectively write-once.
    """

    # Selected body type name; can only be set once (see _observe_body_type).
    body_type = Unicode(help='Selected body type')
    # Loaded from BODY_TYPES_CFG on first access (see _default_body_types).
    body_types = Instance(ConfigObj, help='Mapping of body type names to values')

    # Ratios of radii for shoulders and torso
    ratio_rt = Float(default_value=0, min=0, max=1,
                     help='Ratio between total radius and torso radius')
    ratio_rs = Float(default_value=0, min=0, max=1,
                     help='Ratio between total radius and shoulder radius')
    ratio_ts = Float(
        default_value=0, min=0, max=1,
        help='Ratio between total radius and distance from torso to shoulder')

    # Scales for settings values from truncated normal distribution
    # TODO: Distributions class as instance traits
    radius_mean = Float(default_value=0, min=0)
    radius_scale = Float(default_value=0, min=0)
    target_velocity_mean = Float(default_value=0, min=0)
    target_velocity_scale = Float(default_value=0, min=0)
    mass_mean = Float(default_value=0, min=0)
    mass_scale = Float(default_value=0, min=0)

    @staticmethod
    def _truncnorm(mean, abs_scale):
        """Individual value from truncnorm"""
        # Draw truncated to +/- 3 standard deviations around `mean`.
        # NOTE(review): `scipy.stats.truncnorm` takes `scale=`, not
        # `abs_scale=` — presumably `truncnorm` here is a project helper with
        # that keyword; verify against its definition.  `np.asscalar` is
        # deprecated in newer NumPy (use `.item()`).
        return np.asscalar(truncnorm(-3.0, 3.0, loc=mean, abs_scale=abs_scale))

    @default('body_types')
    def _default_body_types(self):
        # Lazy-load the body-type table from the packaged config files.
        return load_config(BODY_TYPES_CFG, BODY_TYPES_CFG_SPEC)

    @observe('body_type')
    def _observe_body_type(self, change):
        # Write-once: only allowed when the previous value was the empty default.
        if change['old'] == '':
            new = change['new']
            # Copy every configured attribute of the chosen type onto self,
            # which in turn triggers the derived-value observers below.
            for k, v in self.body_types[new].items():
                setattr(self, k, v)
        else:
            raise TraitError('Body type can only be set once.')

    @observe('radius_mean', 'radius_scale')
    def _observe_radius_truncnorm(self, change):
        # Sample radius once both distribution parameters are known and
        # radius has not been set yet.
        if self.radius == 0 and self.radius_mean > 0 and self.radius_scale > 0:
            self.radius = self._truncnorm(self.radius_mean, self.radius_scale)

    @observe('radius', 'ratio_rt', 'ratio_rs', 'ratio_ts')
    def _observe_radius(self, change):
        """Set torso radius if ratio_rt changes and radius is defined or if
        radius changes and ratio_rt is defined."""
        name = change['name']
        if name == 'radius':
            # radius arrived: derive every sub-radius whose ratio is set.
            if self.ratio_rt > 0:
                self.r_t = self.ratio_rt * self.radius
            if self.ratio_rs > 0:
                self.r_s = self.ratio_rs * self.radius
            if self.ratio_ts > 0:
                self.r_ts = self.ratio_ts * self.radius
        elif self.radius > 0:
            # a ratio arrived after radius: derive just that sub-radius.
            if name == 'ratio_rt':
                self.r_t = self.ratio_rt * self.radius
            elif name == 'ratio_rs':
                self.r_s = self.ratio_rs * self.radius
            elif name == 'ratio_ts':
                self.r_ts = self.ratio_ts * self.radius

    @observe('mass_mean', 'mass_scale')
    def _observe_mass_truncnorm(self, change):
        # Sample mass once; same one-shot pattern as radius.
        if self.mass == 0 and self.mass_mean > 0 and self.mass_scale > 0:
            self.mass = self._truncnorm(self.mass_mean, self.mass_scale)

    @observe('target_velocity_mean', 'target_velocity_scale')
    def _observe_target_velocity_truncnorm(self, change):
        # Sample target velocity once; same one-shot pattern as radius.
        if self.target_velocity == 0 and self.target_velocity_mean > 0 and self.target_velocity_scale > 0:
            self.target_velocity = self._truncnorm(self.target_velocity_mean,
                                                   self.target_velocity_scale)

    @observe('mass', 'radius')
    def _observe_inertia_rot(self, change):
        # Rotational inertia scaled from a reference body
        # (mass 80.0, radius 0.27, inertia 4*pi); set once.
        if self.inertia_rot == 0 and self.mass > 0 and self.radius > 0:
            inertia = 4.0 * np.pi
            mass = 80.0
            radius = 0.27
            self.inertia_rot = inertia * (self.mass / mass) * (self.radius / radius)**2
class Configurable(HasTraits):
    """Base class for objects configurable via a :class:`Config` object."""

    # The effective configuration for this instance (defaults to empty).
    config = Instance(Config, (), {})
    # Optional parent Configurable; its config is inherited and its sections
    # contribute nested lookups in _find_my_config.
    parent = Instance("traitlets.config.configurable.Configurable", allow_none=True)

    def __init__(self, **kwargs):
        """Create a configurable given a config config.

        Parameters
        ----------
        config : Config
            If this is empty, default values are used. If config is a
            :class:`Config` instance, it will be used to configure the
            instance.
        parent : Configurable instance, optional
            The parent Configurable instance of this object.

        Notes
        -----
        Subclasses of Configurable must call the :meth:`__init__` method of
        :class:`Configurable` *before* doing anything else and using
        :func:`super`::

            class MyConfigurable(Configurable):
                def __init__(self, config=None):
                    super(MyConfigurable, self).__init__(config=config)
                    # Then any other code you need to finish initialization.

        This ensures that instances will be configured properly.
        """
        parent = kwargs.pop("parent", None)
        if parent is not None:
            # config is implied from parent
            if kwargs.get("config", None) is None:
                kwargs["config"] = parent.config
            self.parent = parent

        config = kwargs.pop("config", None)

        # load kwarg traits, other than config
        super().__init__(**kwargs)

        # record traits set by config
        config_override_names = set()

        def notice_config_override(change):
            """Record traits set by both config and kwargs.

            They will need to be overridden again after loading config.
            """
            if change.name in kwargs:
                config_override_names.add(change.name)

        # Observe while loading config so kwarg-set traits that config also
        # touches can be re-applied afterwards (explicit kwargs win).
        self.observe(notice_config_override)

        # load config
        if config is not None:
            # We used to deepcopy, but for now we are trying to just save
            # by reference. This *could* have side effects as all components
            # will share config. In fact, I did find such a side effect in
            # _config_changed below. If a config attribute value was a mutable type
            # all instances of a component were getting the same copy, effectively
            # making that a class attribute.
            # self.config = deepcopy(config)
            self.config = config
        else:
            # allow _config_default to return something
            self._load_config(self.config)
        self.unobserve(notice_config_override)

        # Re-apply explicit keyword values so they take priority over config.
        for name in config_override_names:
            setattr(self, name, kwargs[name])

    # -------------------------------------------------------------------------
    # Static trait notifiations
    # -------------------------------------------------------------------------

    @classmethod
    def section_names(cls):
        """return section names as a list"""
        # Configurable subclasses in reverse-MRO order (base first).
        return [
            c.__name__
            for c in reversed(cls.__mro__)
            if issubclass(c, Configurable) and issubclass(cls, c)
        ]

    def _find_my_config(self, cfg):
        """extract my config from a global Config object

        will construct a Config object of only the config values that apply to me
        based on my mro(), as well as those of my parent(s) if they exist.

        If I am Bar and my parent is Foo, and their parent is Tim,
        this will return merge following config sections, in this order::

            [Bar, Foo.Bar, Tim.Foo.Bar]

        With the last item being the highest priority.
        """
        cfgs = [cfg]
        if self.parent:
            # Parent-scoped sections (e.g. Foo.Bar) take priority: they are
            # merged after the top-level ones below.
            cfgs.append(self.parent._find_my_config(cfg))
        my_config = Config()
        for c in cfgs:
            for sname in self.section_names():
                # Don't do a blind getattr as that would cause the config to
                # dynamically create the section with name Class.__name__.
                if c._has_section(sname):
                    my_config.merge(c[sname])
        return my_config

    def _load_config(self, cfg, section_names=None, traits=None):
        """load traits from a Config object"""

        if traits is None:
            traits = self.traits(config=True)
        if section_names is None:
            section_names = self.section_names()

        my_config = self._find_my_config(cfg)

        # hold trait notifications until after all config has been loaded
        with self.hold_trait_notifications():
            for name, config_value in my_config.items():
                if name in traits:
                    if isinstance(config_value, LazyConfigValue):
                        # ConfigValue is a wrapper for using append / update on containers
                        # without having to copy the initial value
                        initial = getattr(self, name)
                        config_value = config_value.get_value(initial)
                    elif isinstance(config_value, DeferredConfig):
                        # DeferredConfig tends to come from CLI/environment variables
                        config_value = config_value.get_value(traits[name])
                    # We have to do a deepcopy here if we don't deepcopy the entire
                    # config object. If we don't, a mutable config_value will be
                    # shared by all instances, effectively making it a class attribute.
                    setattr(self, name, deepcopy(config_value))
                elif not _is_section_key(name) and not isinstance(
                        config_value, Config):
                    # Unknown option: warn, suggesting close trait-name matches.
                    from difflib import get_close_matches
                    if isinstance(self, LoggingConfigurable):
                        warn = self.log.warning
                    else:
                        warn = lambda msg: warnings.warn(msg, stacklevel=9
                                                         )  # noqa[E371]
                    matches = get_close_matches(name, traits)
                    msg = "Config option `{option}` not recognized by `{klass}`.".format(
                        option=name, klass=self.__class__.__name__)

                    if len(matches) == 1:
                        msg += f" Did you mean `{matches[0]}`?"
                    elif len(matches) >= 1:
                        # two or more matches (the single-match case is above)
                        msg += " Did you mean one of: `{matches}`?".format(
                            matches=", ".join(sorted(matches)))
                    warn(msg)

    @observe("config")
    @observe_compat
    def _config_changed(self, change):
        """Update all the class traits having ``config=True`` in metadata.

        For any class trait with a ``config`` metadata attribute that is
        ``True``, we update the trait with the value of the corresponding
        config entry.
        """
        # Get all traits with a config metadata entry that is True
        traits = self.traits(config=True)

        # We auto-load config section for this class as well as any parent
        # classes that are Configurable subclasses. This starts with Configurable
        # and works down the mro loading the config for each section.
        section_names = self.section_names()
        self._load_config(change.new, traits=traits, section_names=section_names)

    def update_config(self, config):
        """Update config and load the new values"""
        # traitlets prior to 4.2 created a copy of self.config in order to trigger change events.
        # Some projects (IPython < 5) relied upon one side effect of this,
        # that self.config prior to update_config was not modified in-place.
        # For backward-compatibility, we must ensure that self.config
        # is a new object and not modified in-place,
        # but config consumers should not rely on this behavior.
        self.config = deepcopy(self.config)
        # load config
        self._load_config(config)
        # merge it into self.config
        self.config.merge(config)
        # TODO: trigger change event if/when dict-update change events take place
        # DO NOT trigger full trait-change

    @classmethod
    def class_get_help(cls, inst=None):
        """Get the help string for this class in ReST format.

        If `inst` is given, it's current trait values will be used in place of
        class defaults.
        """
        assert inst is None or isinstance(inst, cls)
        final_help = []
        base_classes = ", ".join(p.__name__ for p in cls.__bases__)
        final_help.append(f"{cls.__name__}({base_classes}) options")
        final_help.append(len(final_help[0]) * "-")
        for _, v in sorted(cls.class_traits(config=True).items()):
            help = cls.class_get_trait_help(v, inst)
            final_help.append(help)
        return "\n".join(final_help)

    @classmethod
    def class_get_trait_help(cls, trait, inst=None, helptext=None):
        """Get the helptext string for a single trait.

        :param inst:
            If given, it's current trait values will be used in place of
            the class default.
        :param helptext:
            If not given, uses the `help` attribute of the current trait.
        """
        assert inst is None or isinstance(inst, cls)
        lines = []
        header = f"--{cls.__name__}.{trait.name}"
        if isinstance(trait, (Container, Dict)):
            # Container/Dict traits show a sample item and multiplicity hint.
            multiplicity = trait.metadata.get("multiplicity", "append")
            if isinstance(trait, Dict):
                sample_value = "<key-1>=<value-1>"
            else:
                sample_value = "<%s-item-1>" % trait.__class__.__name__.lower()
            if multiplicity == "append":
                header = f"{header}={sample_value}..."
            else:
                header = f"{header} {sample_value}..."
        else:
            header = f"{header}=<{trait.__class__.__name__}>"
        # header = "--%s.%s=<%s>" % (cls.__name__, trait.name, trait.__class__.__name__)
        lines.append(header)

        if helptext is None:
            helptext = trait.help
        if helptext != "":
            helptext = "\n".join(wrap_paragraphs(helptext, 76))
            lines.append(indent(helptext))

        if "Enum" in trait.__class__.__name__:
            # include Enum choices
            lines.append(indent("Choices: %s" % trait.info()))

        if inst is not None:
            lines.append(indent(f"Current: {getattr(inst, trait.name)!r}"))
        else:
            try:
                dvr = trait.default_value_repr()
            except Exception:
                dvr = None  # ignore defaults we can't construct
            if dvr is not None:
                if len(dvr) > 64:
                    dvr = dvr[:61] + "..."
                lines.append(indent("Default: %s" % dvr))

        return "\n".join(lines)

    @classmethod
    def class_print_help(cls, inst=None):
        """Get the help string for a single trait and print it."""
        print(cls.class_get_help(inst))

    @classmethod
    def _defining_class(cls, trait, classes):
        """Get the class that defines a trait

        For reducing redundant help output in config files.
        Returns the current class if:

        - the trait is defined on this class, or
        - the class where it is defined would not be in the config file

        Parameters
        ----------
        trait : Trait
            The trait to look for
        classes : list
            The list of other classes to consider for redundancy.
            Will return `cls` even if it is not defined on `cls`
            if the defining class is not in `classes`.
        """
        defining_cls = cls
        for parent in cls.mro():
            if (issubclass(parent, Configurable) and parent in classes
                    and parent.class_own_traits(config=True).get(
                        trait.name, None) is trait):
                defining_cls = parent
        return defining_cls

    @classmethod
    def class_config_section(cls, classes=None):
        """Get the config section for this class.

        Parameters
        ----------
        classes : list, optional
            The list of other classes in the config file.
            Used to reduce redundant information.
        """

        def c(s):
            """return a commented, wrapped block."""
            s = "\n\n".join(wrap_paragraphs(s, 78))
            return "## " + s.replace("\n", "\n#  ")

        # section header
        breaker = "#" + "-" * 78
        parent_classes = ", ".join(p.__name__ for p in cls.__bases__
                                   if issubclass(p, Configurable))

        s = f"# {cls.__name__}({parent_classes}) configuration"
        lines = [breaker, s, breaker]
        # get the description trait
        desc = cls.class_traits().get("description")
        if desc:
            desc = desc.default_value
        if not desc:
            # no description from trait, use __doc__
            desc = getattr(cls, "__doc__", "")
        if desc:
            lines.append(c(desc))
            lines.append("")

        for name, trait in sorted(cls.class_traits(config=True).items()):
            default_repr = trait.default_value_repr()

            if classes:
                defining_class = cls._defining_class(trait, classes)
            else:
                defining_class = cls
            if defining_class is cls:
                # cls owns the trait, show full help
                if trait.help:
                    lines.append(c(trait.help))
                if "Enum" in type(trait).__name__:
                    # include Enum choices
                    lines.append("#  Choices: %s" % trait.info())
                lines.append("#  Default: %s" % default_repr)
            else:
                # Trait appears multiple times and isn't defined here.
                # Truncate help to first line + "See also Original.trait"
                if trait.help:
                    lines.append(c(trait.help.split("\n", 1)[0]))
                lines.append(f"#  See also: {defining_class.__name__}.{name}")

            lines.append(f"# c.{cls.__name__}.{name} = {default_repr}")
            lines.append("")
        return "\n".join(lines)

    @classmethod
    def class_config_rst_doc(cls):
        """Generate rST documentation for this class' config options.

        Excludes traits defined on parent classes.
        """
        lines = []
        classname = cls.__name__
        for _, trait in sorted(cls.class_traits(config=True).items()):
            ttype = trait.__class__.__name__

            termline = classname + "." + trait.name

            # Choices or type
            if "Enum" in ttype:
                # include Enum choices
                termline += " : " + trait.info_rst()
            else:
                termline += " : " + ttype
            lines.append(termline)

            # Default value
            try:
                dvr = trait.default_value_repr()
            except Exception:
                dvr = None  # ignore defaults we can't construct
            if dvr is not None:
                if len(dvr) > 64:
                    dvr = dvr[:61] + "..."
                # Double up backslashes, so they get to the rendered docs
                dvr = dvr.replace("\\n", "\\\\n")
                lines.append(indent("Default: ``%s``" % dvr))
                lines.append("")

            help = trait.help or "No description"
            lines.append(indent(dedent(help)))

            # Blank line
            lines.append("")

        return "\n".join(lines)
class Configurable(HasTraits):
    """Base class for objects whose traits can be set from a `Config` object.

    Traits tagged ``config=True`` are loaded from the sections of the global
    config that match this class (and its Configurable ancestors), with
    values from more-derived sections taking priority.
    """

    # Active configuration for this instance.
    config = Instance(Config, (), {})
    # Optional parent Configurable; its config sections also apply to us.
    parent = Instance('traitlets.config.configurable.Configurable',
                      allow_none=True)

    def __init__(self, **kwargs):
        """Create a configurable given a config config.

        Parameters
        ----------
        config : Config
            If this is empty, default values are used. If config is a
            :class:`Config` instance, it will be used to configure the
            instance.
        parent : Configurable instance, optional
            The parent Configurable instance of this object.

        Notes
        -----
        Subclasses of Configurable must call the :meth:`__init__` method of
        :class:`Configurable` *before* doing anything else and using
        :func:`super`::

            class MyConfigurable(Configurable):
                def __init__(self, config=None):
                    super(MyConfigurable, self).__init__(config=config)
                    # Then any other code you need to finish initialization.

        This ensures that instances will be configured properly.
        """
        parent = kwargs.pop('parent', None)
        if parent is not None:
            # config is implied from parent
            if kwargs.get('config', None) is None:
                kwargs['config'] = parent.config
            self.parent = parent

        config = kwargs.pop('config', None)

        # load kwarg traits, other than config
        super(Configurable, self).__init__(**kwargs)

        # load config
        if config is not None:
            # We used to deepcopy, but for now we are trying to just save
            # by reference. This *could* have side effects as all components
            # will share config. In fact, I did find such a side effect in
            # _config_changed below. If a config attribute value was a mutable
            # type all instances of a component were getting the same copy,
            # effectively making that a class attribute.
            # self.config = deepcopy(config)
            self.config = config
        else:
            # allow _config_default to return something
            self._load_config(self.config)

        # Ensure explicit kwargs are applied after loading config.
        # This is usually redundant, but ensures config doesn't override
        # explicitly assigned values.
        for key, value in kwargs.items():
            setattr(self, key, value)

    #-------------------------------------------------------------------------
    # Static trait notifiations
    #-------------------------------------------------------------------------

    @classmethod
    def section_names(cls):
        """return section names as a list"""
        # MRO reversed so base-class sections come first (lowest priority).
        return [
            c.__name__ for c in reversed(cls.__mro__)
            if issubclass(c, Configurable) and issubclass(cls, c)
        ]

    def _find_my_config(self, cfg):
        """extract my config from a global Config object

        will construct a Config object of only the config values that apply
        to me based on my mro(), as well as those of my parent(s) if they
        exist.

        If I am Bar and my parent is Foo, and their parent is Tim,
        this will return merge following config sections, in this order::

            [Bar, Foo.bar, Tim.Foo.Bar]

        With the last item being the highest priority.
        """
        cfgs = [cfg]
        if self.parent:
            cfgs.append(self.parent._find_my_config(cfg))
        my_config = Config()
        for c in cfgs:
            for sname in self.section_names():
                # Don't do a blind getattr as that would cause the config to
                # dynamically create the section with name Class.__name__.
                if c._has_section(sname):
                    my_config.merge(c[sname])
        return my_config

    def _load_config(self, cfg, section_names=None, traits=None):
        """load traits from a Config object"""
        if traits is None:
            traits = self.traits(config=True)
        if section_names is None:
            section_names = self.section_names()
        my_config = self._find_my_config(cfg)

        # hold trait notifications until after all config has been loaded
        with self.hold_trait_notifications():
            for name, config_value in iteritems(my_config):
                if name in traits:
                    if isinstance(config_value, LazyConfigValue):
                        # ConfigValue is a wrapper for using append / update
                        # on containers without having to copy the initial
                        # value
                        initial = getattr(self, name)
                        config_value = config_value.get_value(initial)
                    # We have to do a deepcopy here if we don't deepcopy the
                    # entire config object. If we don't, a mutable
                    # config_value will be shared by all instances,
                    # effectively making it a class attribute.
                    setattr(self, name, deepcopy(config_value))

    def _config_changed(self, name, old, new):
        """Update all the class traits having ``config=True`` as metadata.

        For any class trait with a ``config`` metadata attribute that is
        ``True``, we update the trait with the value of the corresponding
        config entry.
        """
        # Get all traits with a config metadata entry that is True
        traits = self.traits(config=True)

        # We auto-load config section for this class as well as any parent
        # classes that are Configurable subclasses.  This starts with
        # Configurable and works down the mro loading the config for each
        # section.
        section_names = self.section_names()
        self._load_config(new, traits=traits, section_names=section_names)

    def update_config(self, config):
        """Fire the traits events when the config is updated."""
        # Save a copy of the current config.
        newconfig = deepcopy(self.config)
        # Merge the new config into the current one.
        newconfig.merge(config)
        # Save the combined config as self.config, which triggers the traits
        # events.
        self.config = newconfig

    @classmethod
    def class_get_help(cls, inst=None):
        """Get the help string for this class in ReST format.

        If `inst` is given, it's current trait values will be used in place of
        class defaults.
        """
        assert inst is None or isinstance(inst, cls)
        final_help = []
        final_help.append(u'%s options' % cls.__name__)
        final_help.append(len(final_help[0]) * u'-')
        for k, v in sorted(cls.class_traits(config=True).items()):
            help = cls.class_get_trait_help(v, inst)
            final_help.append(help)
        return '\n'.join(final_help)

    @classmethod
    def class_get_trait_help(cls, trait, inst=None):
        """Get the help string for a single trait.

        If `inst` is given, it's current trait values will be used in place of
        the class default.
        """
        assert inst is None or isinstance(inst, cls)
        lines = []
        # Command-line style header: --Class.trait=<TraitType>
        header = "--%s.%s=<%s>" % (cls.__name__, trait.name,
                                   trait.__class__.__name__)
        lines.append(header)
        if inst is not None:
            lines.append(indent('Current: %r' % getattr(inst, trait.name), 4))
        else:
            try:
                dvr = repr(trait.get_default_value())
            except Exception:
                dvr = None  # ignore defaults we can't construct
            if dvr is not None:
                if len(dvr) > 64:
                    dvr = dvr[:61] + '...'
                lines.append(indent('Default: %s' % dvr, 4))
        if 'Enum' in trait.__class__.__name__:
            # include Enum choices
            lines.append(indent('Choices: %r' % (trait.values, )))

        help = trait.get_metadata('help')
        if help is not None:
            help = '\n'.join(wrap_paragraphs(help, 76))
            lines.append(indent(help, 4))
        return '\n'.join(lines)

    @classmethod
    def class_print_help(cls, inst=None):
        """Get the help string for a single trait and print it."""
        print(cls.class_get_help(inst))

    @classmethod
    def class_config_section(cls):
        """Get the config class config section"""

        def c(s):
            """return a commented, wrapped block."""
            s = '\n\n'.join(wrap_paragraphs(s, 78))
            return '# ' + s.replace('\n', '\n# ')

        # section header
        breaker = '#' + '-' * 78
        s = "# %s configuration" % cls.__name__
        lines = [breaker, s, breaker, '']
        # get the description trait
        desc = cls.class_traits().get('description')
        if desc:
            desc = desc.default_value
        else:
            # no description trait, use __doc__
            desc = getattr(cls, '__doc__', '')
        if desc:
            lines.append(c(desc))
            lines.append('')

        parents = []
        for parent in cls.mro():
            # only include parents that are not base classes
            # and are not the class itself
            # and have some configurable traits to inherit
            if parent is not cls and issubclass(parent, Configurable) and \
                    parent.class_traits(config=True):
                parents.append(parent)

        if parents:
            pstr = ', '.join([p.__name__ for p in parents])
            lines.append(
                c('%s will inherit config from: %s' % (cls.__name__, pstr)))
            lines.append('')

        for name, trait in iteritems(cls.class_traits(config=True)):
            help = trait.get_metadata('help') or ''
            lines.append(c(help))
            lines.append('# c.%s.%s = %r' % (cls.__name__, name,
                                             trait.get_default_value()))
            lines.append('')
        return '\n'.join(lines)
class SaveSimulationData(LogicNode):
    """Logic for saving simulation data.

    Saved once
    - Geometry
    - Metadata

    Saved continuously
    - Agents
    - Data

    Examples:
        >>> def save_condition(simulation, frequency=100):
        >>>     return (simulation.data['iterations'] + 1) % frequency == 0
        >>>
        >>> node = SaveSimulationData(save_condition=save_condition,
        >>>                           base_directory='.')
    """
    save_condition = Instance(
        Callable,
        # allow_none=True,
        help='Function to trigger saving of data.')
    base_directory = Unicode(
        default_value='.',
        help='Path to the directory where simulation data should be saved.')
    save_directory = Unicode(
        help='Name of the directory to save current simulation.')

    def __init__(self, simulation, *args, **kwargs):
        # Create the output directory, then write the one-shot artifacts
        # (metadata, geometry) and prime the generator-based savers that
        # are fed continuously from update().
        super().__init__(simulation, *args, **kwargs)
        os.makedirs(self.full_path, exist_ok=True)

        # Metadata: the saver is a coroutine/generator — the first
        # send(None) primes it, then we feed it the payload, and a final
        # send(True) flushes the write immediately.
        save_data_csv = save_csv(self.full_path, 'metadata')
        save_data_csv.send(None)
        save_data_csv.send(self.simulation.metadata)
        save_data_csv.send(True)

        # Geometry (saved once as JSON).
        geometries = {
            name: getattr(self.simulation.field, name)
            for name in ('domain', 'obstacles', 'targets', 'spawns')
        }
        save_geometry_json(os.path.join(self.full_path, 'geometry.json'),
                           geometries)

        # Data saver (continuous; primed here, fed in update()).
        self.save_data_csv = save_csv(self.full_path, 'data')
        self.save_data_csv.send(None)

        # Agents saver (continuous; primed here, fed in update()).
        self.save_agent_npy = save_npy(self.full_path, 'agents')
        self.save_agent_npy.send(None)

    @property
    def full_path(self):
        """Absolute path of the directory this simulation is saved into."""
        return os.path.join(os.path.abspath(self.base_directory),
                            self.save_directory)

    @default('save_directory')
    def _default_save_directory(self):
        # Default directory name includes the simulation timestamp so
        # repeated runs don't overwrite each other.
        return self.simulation.name_with_timestamp

    def add_to_simulation_logic(self):
        """Insert this node into the simulation logic before 'Reset'."""
        self.simulation.logic['Reset'].inject_before(self)

    def update(self):
        """Feed current agents/data to the savers every iteration.

        The boolean sent after each payload tells the saver whether to
        flush to disk now (per save_condition) or keep buffering.
        """
        save = self.save_condition(self.simulation)
        self.save_agent_npy.send(self.simulation.agents.array)
        self.save_agent_npy.send(save)
        self.save_data_csv.send(self.simulation.data)
        self.save_data_csv.send(save)
class SSHHydraKernelProvisioner(FileManagementMixin, HydraKernelProvisioner):
    """Provision and manage a Hydra subkernel on a remote host over SSH.

    The provisioner installs the requested kernelspec on the remote host if
    needed (via Ansible), launches ``hydra-agent`` there, and forwards the
    kernel's ZMQ ports to the local machine through an SSH control-master
    connection.
    """

    host = Unicode()
    user = Unicode()
    private_key_file = Unicode(allow_none=True)
    timeout = Int(DEFAULT_SSH_TIMEOUT)
    sudo = Bool(False)
    host_key_checking = Bool(
        False,
        help=
        ("If set, remote connections to hosts that do not have an entry in the "
         "system host key list will raise an error."),
    )

    connection = Instance("hydra_kernel.provisioning.ssh.SSHConnection",
                          allow_none=True)
    pid = Int(allow_none=True)

    _kernelspecs: "Dict" = None
    _subkernel_connection: "KernelConnectionInfo" = None
    # Maps port name -> local forwarded port. NOTE(review): this class-level
    # dict is only a default; it is re-assigned per instance in reset() and
    # pre_launch() so instances never mutate the shared class attribute.
    _tunnels: "Dict[str, int]" = {}
    _tunnel_ctl_path: "str" = None

    @property
    def has_process(self) -> bool:
        """True once a connection exists and a remote PID is known."""
        if self.connection is None:
            return False
        if self.pid is None:
            return False
        return True

    def reset(self) -> None:
        """Forget all remote state (connection, PID, specs, tunnels)."""
        self.connection = None
        self.pid = None
        self._kernelspecs = None
        self._tunnels = {}
        self._tunnel_ctl_path = None

    async def _save_host_key(self):
        """Scan the remote host's SSH key and pin it in ~/.ssh/known_hosts.

        A previously written block for this host (delimited by BEGIN/END
        marker comments) is replaced so repeated scans don't accumulate.
        """
        hosts_file_path = pathlib.Path(pathlib.Path.home(), ".ssh",
                                       "known_hosts")
        hosts_file_path.parent.mkdir(exist_ok=True)
        hosts_file_path.touch()
        with hosts_file_path.open("r+") as hosts_file:
            start = f"# BEGIN hydra_kernel: {self.host}"
            end = f"# END hydra_kernel: {self.host}"
            # readlines() keeps the trailing newline on each line; strip it
            # so the marker comparison below can actually match.
            lines = [line.rstrip("\n") for line in hosts_file.readlines()]
            start_i = end_i = None
            for i, line in enumerate(lines):
                if line == start:
                    start_i = i
                elif line == end:
                    end_i = i
                    break
            # Splice out the old block, END marker included, but only when
            # both markers were found (a dangling marker would otherwise
            # duplicate or drop unrelated lines).
            if start_i is not None and end_i is not None:
                lines = lines[:start_i] + lines[end_i + 1:]

            proc = await asyncio.create_subprocess_exec(
                "ssh-keyscan",
                "-H",
                shlex.quote(self.host),
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
            )
            stdout, stderr = await proc.communicate()
            if proc.returncode != 0:
                # communicate() returns bytes, not streams; decode for the log
                LOG.warning(
                    f"Failed to update host key for {self.host}: "
                    f"{stderr.decode('utf-8', 'replace')}")
            lines.append(start)
            lines.append(stdout.decode("utf-8"))
            lines.append(end)
            hosts_file.seek(0)
            hosts_file.write("\n".join(lines))
            # Drop any stale tail if the new content is shorter than the old.
            hosts_file.truncate()

    async def pre_launch(self, **kwargs: "Any") -> "Dict[str, Any]":
        """Open the SSH connection, ensure the kernelspec exists remotely,
        and build the remote launch command."""
        kwargs = await super().pre_launch(**kwargs)

        self.connection = SSHConnection(parent=self)
        # Fresh per-instance tunnel map (avoid mutating the class default).
        self._tunnels = {}

        # Check if desired kernel exists on remote
        self.binding.update_progress("Checking host kernels")
        if not await self.has_hydra_kernelspec(self.subkernel_name):
            self.binding.update_progress(
                f"Installing {self.subkernel_name} kernel")
            await self.provision_hydra_kernelspec(self.subkernel_name)

        kwargs["cmd"] = [
            "hydra-agent",
            f"--kernel={self.subkernel_name}",
            f"--id={self.kernel_id}",
            f"--launcher=hydra-subkernel",
        ]

        return kwargs

    async def launch_kernel(self, command, **kwargs):
        """Run the agent remotely and tunnel its ports back locally.

        Returns connection info rewritten so each port points at the local
        end of an SSH forward.
        """
        self.binding.update_progress("Establishing secure connection")
        LOG.debug(f"{self.binding.name}: kernel_cmd={command}")
        subkernel = self.connection.exec_json(command, login=True)
        self._subkernel_connection = subkernel["connection"]
        LOG.debug(
            f"{self.binding.name}: connection={self._subkernel_connection}")

        conn_info = self._subkernel_connection.copy()
        if not self.host_key_checking:
            await self._save_host_key()
        for port_name in port_names:
            conn_info[port_name] = await self._tunnel_to_port(port_name)

        self.pid = int(subkernel["pid"])

        return conn_info

    async def send_signal(self, signum):
        """Deliver a signal to the remote kernel process (best-effort)."""
        try:
            self.connection.exec(f"kill -{signum} {self.pid}")
        except BindingConnectionError as exc:
            LOG.error(f"Failed to send signal: {exc}")

    async def poll(self) -> "Optional[int]":
        """Return -1 if the remote process is gone, None while running."""
        try:
            # TODO: also check status of tunnels here
            self.connection.exec(f"kill -0 {self.pid}")
        except OSError:
            return -1

    async def cleanup(self, restart: bool = False) -> None:
        """Tear down the SSH port forwards.

        All forwards run over the single control-master connection, so
        asking it to exit closes every tunnel at once. (The previous
        implementation called ``os.kill(tunnel["pid"])``, which both lacked
        the required signal argument and indexed into an int — the tunnel
        map stores local port numbers, not dicts.)
        """
        if self._tunnels and self._tunnel_ctl_path:
            try:
                LOG.debug(f"Closing SSH tunnels: {list(self._tunnels)}")
                await self._tunnel_command(["exit"])
            except (OSError, RuntimeError) as exc:
                LOG.debug(f"Error closing SSH tunnels: {exc}")
        self._tunnels = {}
        self._tunnel_ctl_path = None

    async def has_hydra_kernelspec(self, kernel_name):
        """Check whether the remote host can launch `kernel_name`.

        True if hydra binaries are installed and a kernelspec matches by
        name or by language.
        """
        try:
            ret, _, _ = self.connection.exec("which hydra-subkernel")
            if ret != 0:
                return False
        except RuntimeError as exc:
            LOG.error(
                f"Failed to check for hydra binaries on {self.binding.name}: "
                f"{exc}")
            return False

        if not self._kernelspecs:
            LOG.info(f"Fetching all kernel specs for '{self.binding.name}'")
            try:
                self._kernelspecs = self.connection.exec_json(
                    "jupyter kernelspec list --json --log-level ERROR",
                    login=True)["kernelspecs"]
            except RuntimeError as exc:
                LOG.warning(
                    f"Failed to list kernel specs on {self.binding.name}: "
                    f"{exc}")
                return False

        for spec_name, spec_info in self._kernelspecs.items():
            lang = spec_info["spec"]["language"]
            if spec_name == kernel_name or lang == kernel_name:
                return True

        return False

    async def provision_hydra_kernelspec(self, kernel_name):
        """Install `kernel_name` on the remote host via an Ansible playbook.

        Raises RuntimeError if the playbook run is not successful.
        """
        ansible_dir = os.path.join(sys.prefix, "share", "hydra-kernel",
                                   "ansible")
        host_vars = {
            "ansible_host": self.host,
            "ansible_user": self.user,
            "ansible_become": self.sudo,
            "ansible_ssh_private_key_file":
            _expand_path(self.private_key_file),
            # TODO: handle "via"
        }
        with tempfile.TemporaryDirectory() as tmpdir:
            with redirect_stdout(io.StringIO()):
                runner = ansible_runner.run(
                    private_data_dir=tmpdir,
                    project_dir=ansible_dir,
                    inventory={"all": {
                        "hosts": {
                            "KERNEL": host_vars
                        }
                    }},
                    playbook="kernel_action.yml",
                    extravars={
                        "kernel_name": kernel_name,
                        "kernel_action": "install",
                    },
                    event_handler=self._on_ansible_event,
                    # Don't output to stdout, store as JSON instead
                    quiet=True,
                    json_mode=True,
                )
            LOG.debug(runner.stdout.read())
            if runner.status != "successful" or runner.errored:
                raise RuntimeError(f"Failed to install kernel {kernel_name}")

        # Invalidate kernelspecs as we have installed a new one
        self._kernelspecs = None

    async def upload_path(self, local_path: "str", remote_path: "str" = None):
        """Upload a local path to the remote host as a tarball and unpack."""
        req_id = uuid.uuid4()
        tmp_archive = f"/tmp/{req_id}.tar.gz"

        self.binding.update_progress("Preparing upload")
        fd = self.prepare_upload(local_path)

        def _on_progress(filename, size, sent):
            self.binding.update_progress(
                f"Uploading ({math.floor((sent/size) * 100)}%)")

        self.connection.put_file(fd, tmp_archive, on_progress=_on_progress)

        self.binding.update_progress("Finishing")
        self.connection.exec(["tar", "xzf", tmp_archive, "-C", remote_path])
        self.connection.exec(["rm", "-f", tmp_archive])

    async def download_path(self,
                            remote_path: "str",
                            local_path: "str" = None):
        """Pack a remote path into a tarball, fetch it, unpack locally."""
        req_id = uuid.uuid4()
        tmp_archive = f"/tmp/{req_id}.tar.gz"

        self.binding.update_progress("Preparing download")
        self.connection.exec(
            ["tar", "czf", tmp_archive, "-C", remote_path, "."])

        def _on_progress(filename, size, sent):
            self.binding.update_progress(
                f"Downloading ({math.floor((sent/size) * 100)}%)")

        with self.connection.get_file(tmp_archive,
                                      on_progress=_on_progress) as archive_fd:
            self.binding.update_progress("Finishing")
            with tarfile.open(fileobj=archive_fd, mode="r") as tar:
                tar.extractall(local_path)

        self.connection.exec(["rm", "-f", tmp_archive])

    async def _tunnel_to_port(self, port_name: "str",
                              lport: "int" = None) -> "int":
        """Forward the subkernel's `port_name` to a local port; return it."""
        stream = io.StringIO()
        error = None
        with redirect_stdout(stream):
            try:
                subkernel_conn = self._subkernel_connection
                self.binding.update_progress(
                    f"Starting kernel {port_name} tunnel")
                if not await self._is_tunnel_up():
                    await self._start_tunnel()
                if not lport:
                    lport = select_random_ports(1)[0]
                await self._forward_over_tunnel(lport,
                                                subkernel_conn[port_name])
                self._tunnels[port_name] = lport
            except (RuntimeError, TypeError) as exc:
                error = exc
        if error:
            stream.seek(0)
            LOG.error(f"error={error}, stdout={stream.read()}")
            raise RuntimeError(
                f"Failed to establish tunnel for {port_name}: {error}")
        return self._tunnels[port_name]

    @property
    def _ssh_host(self):
        """user@host target for ssh invocations."""
        return f"{self.user}@{self.host}"

    @property
    def _ssh_cmd(self):
        """Base ssh argv, including the identity file when configured."""
        cmd = ["ssh"]
        if self.private_key_file:
            cmd.extend(["-i", _expand_path(self.private_key_file)])
        return cmd

    async def _start_tunnel(self):
        """Start a background SSH control-master used for all port forwards.

        Raises RuntimeError if the master connection cannot be established.
        """
        self._tunnel_ctl_path = pathlib.Path(
            tempfile.gettempdir(),
            f"{self.user}-{self.host.replace('.', '-')}.tunnel")
        if self._tunnel_ctl_path.exists():
            self._tunnel_ctl_path.unlink()
        cmd = self._ssh_cmd
        cmd.extend([
            "-fN",  # -f = background process, -N = don't run a command
            "-o",
            "ControlMaster=yes",
            "-o",
            f"ControlPath={self._tunnel_ctl_path}",
            "-o",
            "ServerAliveInterval=5",
            self._ssh_host,
        ])
        tunnel_proc = await asyncio.create_subprocess_exec(*cmd)
        _, stderr = await tunnel_proc.communicate()
        if tunnel_proc.returncode != 0:
            # Plain assignment cannot fail; no try/except needed here.
            self._tunnel_ctl_path = None
            raise RuntimeError(
                f"Failed to establish SSH tunnel to {self.host}")

    async def _forward_over_tunnel(self, lport, rport):
        """Add a local->remote forward on the control-master connection."""
        LOG.debug(f"_forward_over_tunnel: forwarding {lport} => {rport}")
        returncode, _, stderr = await self._tunnel_command(
            ["forward", "-L", f"127.0.0.1:{lport}:127.0.0.1:{rport}"])
        if returncode != 0:
            # Surface the failure; callers (see _tunnel_to_port) catch
            # RuntimeError and report it, instead of silently proceeding
            # with a dead forward.
            raise RuntimeError(
                f"Failed to forward port {lport} => {rport}: "
                f"{stderr.decode('utf-8', 'replace')}")

    async def _is_tunnel_up(self):
        """Check whether the control-master connection is alive."""
        if not self._tunnel_ctl_path:
            return False
        returncode, _, _ = await self._tunnel_command(["check"])
        return returncode == 0

    async def _tunnel_command(self, cmd: "list[str]"):
        """Run an ssh -O control command; return (returncode, stdout, stderr)."""
        proc = await asyncio.create_subprocess_exec(
            *self._ssh_cmd,
            "-o",
            f"ControlPath={self._tunnel_ctl_path}",
            "-O",
            *cmd,
            self._ssh_host,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        stdout, stderr = await proc.communicate()
        return proc.returncode, stdout, stderr

    def _on_ansible_event(self, event):
        """Relay the current Ansible task name as a progress update."""
        current_task = event.get("event_data", {}).get("task")
        if current_task:
            self.binding.update_progress(current_task)
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp, ConnectionFileMixin): name = "ipython-kernel" aliases = Dict(kernel_aliases) flags = Dict(kernel_flags) classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session] # the kernel class, as an importstring kernel_class = Type( "ipykernel.ipkernel.IPythonKernel", klass="ipykernel.kernelbase.Kernel", help="""The Kernel subclass to be used. This should allow easy re-use of the IPKernelApp entry point to configure and launch kernels other than IPython's own. """, ).tag(config=True) kernel = Any() poller = Any() # don't restrict this even though current pollers are all Threads heartbeat = Instance(Heartbeat, allow_none=True) context = Any() shell_socket = Any() control_socket = Any() debugpy_socket = Any() debug_shell_socket = Any() stdin_socket = Any() iopub_socket = Any() iopub_thread = Any() control_thread = Any() _ports = Dict() subcommands = { "install": ( "ipykernel.kernelspec.InstallIPythonKernelSpecApp", "Install the IPython kernel", ), } # connection info: connection_dir = Unicode() @default("connection_dir") def _default_connection_dir(self): return jupyter_runtime_dir() @property def abs_connection_file(self): if os.path.basename(self.connection_file) == self.connection_file: return os.path.join(self.connection_dir, self.connection_file) else: return self.connection_file # streams, etc. 
no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True) no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True) trio_loop = Bool(False, help="Set main event loop.").tag(config=True) quiet = Bool(True, help="Only send stdout/stderr to output stream").tag(config=True) outstream_class = DottedObjectName( "ipykernel.iostream.OutStream", help="The importstring for the OutStream factory" ).tag(config=True) displayhook_class = DottedObjectName( "ipykernel.displayhook.ZMQDisplayHook", help="The importstring for the DisplayHook factory" ).tag(config=True) capture_fd_output = Bool( True, help="""Attempt to capture and forward low-level output, e.g. produced by Extension libraries. """, ).tag(config=True) # polling parent_handle = Integer( int(os.environ.get("JPY_PARENT_PID") or 0), help="""kill this process if its parent dies. On Windows, the argument specifies the HANDLE of the parent process, otherwise it is simply boolean. """, ).tag(config=True) interrupt = Integer( int(os.environ.get("JPY_INTERRUPT_EVENT") or 0), help="""ONLY USED ON WINDOWS Interrupt this process when the parent is signaled. """, ).tag(config=True) def init_crash_handler(self): sys.excepthook = self.excepthook def excepthook(self, etype, evalue, tb): # write uncaught traceback to 'real' stderr, not zmq-forwarder traceback.print_exception(etype, evalue, tb, file=sys.__stderr__) def init_poller(self): if sys.platform == "win32": if self.interrupt or self.parent_handle: self.poller = ParentPollerWindows(self.interrupt, self.parent_handle) elif self.parent_handle and self.parent_handle != 1: # PID 1 (init) is special and will never go away, # only be reassigned. # Parent polling doesn't work if ppid == 1 to start with. 
self.poller = ParentPollerUnix() def _try_bind_socket(self, s, port): iface = "%s://%s" % (self.transport, self.ip) if self.transport == "tcp": if port <= 0: port = s.bind_to_random_port(iface) else: s.bind("tcp://%s:%i" % (self.ip, port)) elif self.transport == "ipc": if port <= 0: port = 1 path = "%s-%i" % (self.ip, port) while os.path.exists(path): port = port + 1 path = "%s-%i" % (self.ip, port) else: path = "%s-%i" % (self.ip, port) s.bind("ipc://%s" % path) return port def _bind_socket(self, s, port): try: win_in_use = errno.WSAEADDRINUSE # type:ignore[attr-defined] except AttributeError: win_in_use = None # Try up to 100 times to bind a port when in conflict to avoid # infinite attempts in bad setups max_attempts = 1 if port else 100 for attempt in range(max_attempts): try: return self._try_bind_socket(s, port) except zmq.ZMQError as ze: # Raise if we have any error not related to socket binding if ze.errno != errno.EADDRINUSE and ze.errno != win_in_use: raise if attempt == max_attempts - 1: raise def write_connection_file(self): """write connection info to JSON file""" cf = self.abs_connection_file self.log.debug("Writing connection file: %s", cf) write_connection_file( cf, ip=self.ip, key=self.session.key, transport=self.transport, shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, iopub_port=self.iopub_port, control_port=self.control_port, ) def cleanup_connection_file(self): cf = self.abs_connection_file self.log.debug("Cleaning up connection file: %s", cf) try: os.remove(cf) except OSError: pass self.cleanup_ipc_files() def init_connection_file(self): if not self.connection_file: self.connection_file = "kernel-%s.json" % os.getpid() try: self.connection_file = filefind(self.connection_file, [".", self.connection_dir]) except OSError: self.log.debug("Connection file not found: %s", self.connection_file) # This means I own it, and I'll create it in this directory: os.makedirs(os.path.dirname(self.abs_connection_file), 
mode=0o700, exist_ok=True) # Also, I will clean it up: atexit.register(self.cleanup_connection_file) return try: self.load_connection_file() except Exception: self.log.error( "Failed to load connection file: %r", self.connection_file, exc_info=True ) self.exit(1) def init_sockets(self): # Create a context, a session, and the kernel sockets. self.log.info("Starting the kernel at pid: %i", os.getpid()) assert self.context is None, "init_sockets cannot be called twice!" self.context = context = zmq.Context() atexit.register(self.close) self.shell_socket = context.socket(zmq.ROUTER) self.shell_socket.linger = 1000 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) self.stdin_socket = context.socket(zmq.ROUTER) self.stdin_socket.linger = 1000 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) if hasattr(zmq, "ROUTER_HANDOVER"): # set router-handover to workaround zeromq reconnect problems # in certain rare circumstances # see ipython/ipykernel#270 and zeromq/libzmq#2892 self.shell_socket.router_handover = self.stdin_socket.router_handover = 1 self.init_control(context) self.init_iopub(context) def init_control(self, context): self.control_socket = context.socket(zmq.ROUTER) self.control_socket.linger = 1000 self.control_port = self._bind_socket(self.control_socket, self.control_port) self.log.debug("control ROUTER Channel on port: %i" % self.control_port) self.debugpy_socket = context.socket(zmq.STREAM) self.debugpy_socket.linger = 1000 self.debug_shell_socket = context.socket(zmq.DEALER) self.debug_shell_socket.linger = 1000 if self.shell_socket.getsockopt(zmq.LAST_ENDPOINT): self.debug_shell_socket.connect(self.shell_socket.getsockopt(zmq.LAST_ENDPOINT)) if hasattr(zmq, "ROUTER_HANDOVER"): # set router-handover to workaround zeromq reconnect problems # in certain rare circumstances # see 
ipython/ipykernel#270 and zeromq/libzmq#2892 self.control_socket.router_handover = 1 self.control_thread = ControlThread(daemon=True) def init_iopub(self, context): self.iopub_socket = context.socket(zmq.PUB) self.iopub_socket.linger = 1000 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) self.configure_tornado_logger() self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True) self.iopub_thread.start() # backward-compat: wrap iopub socket API in background thread self.iopub_socket = self.iopub_thread.background_socket def init_heartbeat(self): """start the heart beating""" # heartbeat doesn't share context, because it mustn't be blocked # by the GIL, which is accessed by libzmq when freeing zero-copy messages hb_ctx = zmq.Context() self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) self.hb_port = self.heartbeat.port self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) self.heartbeat.start() def close(self): """Close zmq sockets in an orderly fashion""" # un-capture IO before we start closing channels self.reset_io() self.log.info("Cleaning up sockets") if self.heartbeat: self.log.debug("Closing heartbeat channel") self.heartbeat.context.term() if self.iopub_thread: self.log.debug("Closing iopub channel") self.iopub_thread.stop() self.iopub_thread.close() if self.control_thread and self.control_thread.is_alive(): self.log.debug("Closing control thread") self.control_thread.stop() self.control_thread.join() if self.debugpy_socket and not self.debugpy_socket.closed: self.debugpy_socket.close() if self.debug_shell_socket and not self.debug_shell_socket.closed: self.debug_shell_socket.close() for channel in ("shell", "control", "stdin"): self.log.debug("Closing %s channel", channel) socket = getattr(self, channel + "_socket", None) if socket and not socket.closed: socket.close() self.log.debug("Terminating zmq context") self.context.term() 
self.log.debug("Terminated zmq context") def log_connection_info(self): """display connection info, and store ports""" basename = os.path.basename(self.connection_file) if ( basename == self.connection_file or os.path.dirname(self.connection_file) == self.connection_dir ): # use shortname tail = basename else: tail = self.connection_file lines = [ "To connect another client to this kernel, use:", " --existing %s" % tail, ] # log connection info # info-level, so often not shown. # frontends should use the %connect_info magic # to see the connection info for line in lines: self.log.info(line) # also raw print to the terminal if no parent_handle (`ipython kernel`) # unless log-level is CRITICAL (--quiet) if not self.parent_handle and self.log_level < logging.CRITICAL: print(_ctrl_c_message, file=sys.__stdout__) for line in lines: print(line, file=sys.__stdout__) self._ports = dict( shell=self.shell_port, iopub=self.iopub_port, stdin=self.stdin_port, hb=self.hb_port, control=self.control_port, ) def init_blackhole(self): """redirects stdout/stderr to devnull if necessary""" if self.no_stdout or self.no_stderr: blackhole = open(os.devnull, "w") if self.no_stdout: sys.stdout = sys.__stdout__ = blackhole if self.no_stderr: sys.stderr = sys.__stderr__ = blackhole def init_io(self): """Redirect input streams and set a display hook.""" if self.outstream_class: outstream_factory = import_item(str(self.outstream_class)) if sys.stdout is not None: sys.stdout.flush() e_stdout = None if self.quiet else sys.__stdout__ e_stderr = None if self.quiet else sys.__stderr__ if not self.capture_fd_output: outstream_factory = partial(outstream_factory, watchfd=False) sys.stdout = outstream_factory(self.session, self.iopub_thread, "stdout", echo=e_stdout) if sys.stderr is not None: sys.stderr.flush() sys.stderr = outstream_factory(self.session, self.iopub_thread, "stderr", echo=e_stderr) if hasattr(sys.stderr, "_original_stdstream_copy"): for handler in self.log.handlers: if 
isinstance(handler, StreamHandler) and (handler.stream.buffer.fileno() == 2): self.log.debug("Seeing logger to stderr, rerouting to raw filedescriptor.") handler.stream = TextIOWrapper( FileIO( sys.stderr._original_stdstream_copy, # type:ignore[attr-defined] "w", ) ) if self.displayhook_class: displayhook_factory = import_item(str(self.displayhook_class)) self.displayhook = displayhook_factory(self.session, self.iopub_socket) sys.displayhook = self.displayhook self.patch_io() def reset_io(self): """restore original io restores state after init_io """ sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__ sys.displayhook = sys.__displayhook__ def patch_io(self): """Patch important libraries that can't handle sys.stdout forwarding""" try: import faulthandler except ImportError: pass else: # Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible # updates to the upstream API and update accordingly (up-to-date as of Python 3.5): # https://docs.python.org/3/library/faulthandler.html#faulthandler.enable # change default file to __stderr__ from forwarded stderr faulthandler_enable = faulthandler.enable def enable(file=sys.__stderr__, all_threads=True, **kwargs): return faulthandler_enable(file=file, all_threads=all_threads, **kwargs) faulthandler.enable = enable if hasattr(faulthandler, "register"): faulthandler_register = faulthandler.register def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs): return faulthandler_register( signum, file=file, all_threads=all_threads, chain=chain, **kwargs ) faulthandler.register = register def init_signal(self): signal.signal(signal.SIGINT, signal.SIG_IGN) def init_kernel(self): """Create the Kernel object itself""" shell_stream = ZMQStream(self.shell_socket) control_stream = ZMQStream(self.control_socket, self.control_thread.io_loop) debugpy_stream = ZMQStream(self.debugpy_socket, self.control_thread.io_loop) self.control_thread.start() kernel_factory = self.kernel_class.instance 
kernel = kernel_factory( parent=self, session=self.session, control_stream=control_stream, debugpy_stream=debugpy_stream, debug_shell_socket=self.debug_shell_socket, shell_stream=shell_stream, control_thread=self.control_thread, iopub_thread=self.iopub_thread, iopub_socket=self.iopub_socket, stdin_socket=self.stdin_socket, log=self.log, profile_dir=self.profile_dir, user_ns=self.user_ns, ) kernel.record_ports({name + "_port": port for name, port in self._ports.items()}) self.kernel = kernel # Allow the displayhook to get the execution count self.displayhook.get_execution_count = lambda: kernel.execution_count def init_gui_pylab(self): """Enable GUI event loop integration, taking pylab into account.""" # Register inline backend as default # this is higher priority than matplotlibrc, # but lower priority than anything else (mpl.use() for instance). # This only affects matplotlib >= 1.5 if not os.environ.get("MPLBACKEND"): os.environ["MPLBACKEND"] = "module://matplotlib_inline.backend_inline" # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` # to ensure that any exception is printed straight to stderr. # Normally _showtraceback associates the reply with an execution, # which means frontends will never draw it, as this exception # is not associated with any execute request. shell = self.shell assert shell is not None _showtraceback = shell._showtraceback try: # replace error-sending traceback with stderr def print_tb(etype, evalue, stb): print("GUI event loop or pylab initialization failed", file=sys.stderr) assert shell is not None print(shell.InteractiveTB.stb2text(stb), file=sys.stderr) shell._showtraceback = print_tb InteractiveShellApp.init_gui_pylab(self) finally: shell._showtraceback = _showtraceback def init_shell(self): self.shell = getattr(self.kernel, "shell", None) if self.shell: self.shell.configurables.append(self) def configure_tornado_logger(self): """Configure the tornado logging.Logger. 
        Must set up the tornado logger or else tornado will call
        basicConfig for the root logger which makes the root logger
        go to the real sys.stderr instead of the capture streams.
        This function mimics the setup of logging.basicConfig.
        """
        logger = logging.getLogger("tornado")
        handler = logging.StreamHandler()
        formatter = logging.Formatter(logging.BASIC_FORMAT)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    def _init_asyncio_patch(self):
        """set default asyncio policy to be compatible with tornado

        Tornado 6 (at least) is not compatible with the default
        asyncio implementation on Windows

        Pick the older SelectorEventLoopPolicy on Windows
        if the known-incompatible default policy is in use.

        Support for Proactor via a background thread is available in tornado 6.1,
        but it is still preferable to run the Selector in the main thread
        instead of the background.

        do this as early as possible to make it a low priority and overrideable

        ref: https://github.com/tornadoweb/tornado/issues/2608

        FIXME: if/when tornado supports the defaults in asyncio without threads,
        remove and bump tornado requirement for py38.
        Most likely, this will mean a new Python version
        where asyncio.ProactorEventLoop supports add_reader and friends.
        """
        if sys.platform.startswith("win") and sys.version_info >= (3, 8):
            import asyncio

            try:
                from asyncio import (
                    WindowsProactorEventLoopPolicy,
                    WindowsSelectorEventLoopPolicy,
                )
            except ImportError:
                pass
                # not affected
            else:
                # Deliberate exact-type check: a custom subclass policy is assumed
                # to know what it is doing and is left alone.
                if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy:
                    # WindowsProactorEventLoopPolicy is not compatible with tornado 6
                    # fallback to the pre-3.8 default of Selector
                    asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())

    def init_pdb(self):
        """Replace pdb with IPython's version that is interruptible.

        With the non-interruptible version, stopping pdb() locks up the kernel in a
        non-recoverable state.
""" import pdb from IPython.core import debugger if hasattr(debugger, "InterruptiblePdb"): # Only available in newer IPython releases: debugger.Pdb = debugger.InterruptiblePdb pdb.Pdb = debugger.Pdb # type:ignore[misc] pdb.set_trace = debugger.set_trace @catch_config_error def initialize(self, argv=None): self._init_asyncio_patch() super().initialize(argv) if self.subapp is not None: return self.init_pdb() self.init_blackhole() self.init_connection_file() self.init_poller() self.init_sockets() self.init_heartbeat() # writing/displaying connection info must be *after* init_sockets/heartbeat self.write_connection_file() # Log connection info after writing connection file, so that the connection # file is definitely available at the time someone reads the log. self.log_connection_info() self.init_io() try: self.init_signal() except Exception: # Catch exception when initializing signal fails, eg when running the # kernel on a separate thread if self.log_level < logging.CRITICAL: self.log.error("Unable to initialize signal:", exc_info=True) self.init_kernel() # shell init steps self.init_path() self.init_shell() if self.shell: self.init_gui_pylab() self.init_extensions() self.init_code() # flush stdout/stderr, so that anything written to these streams during # initialization do not get associated with the first execution request sys.stdout.flush() sys.stderr.flush() def start(self): if self.subapp is not None: return self.subapp.start() if self.poller is not None: self.poller.start() self.kernel.start() self.io_loop = ioloop.IOLoop.current() if self.trio_loop: from ipykernel.trio_runner import TrioRunner tr = TrioRunner() tr.initialize(self.kernel, self.io_loop) try: tr.run() except KeyboardInterrupt: pass else: try: self.io_loop.start() except KeyboardInterrupt: pass
class Application(SingletonConfigurable): """A singleton application with full configuration support.""" # The name of the application, will usually match the name of the command # line application name = Unicode("application") # The description of the application that is printed at the beginning # of the help. description = Unicode("This is an application.") # default section descriptions option_description = Unicode(option_description) keyvalue_description = Unicode(keyvalue_description) subcommand_description = Unicode(subcommand_description) python_config_loader_class = PyFileConfigLoader json_config_loader_class = JSONFileConfigLoader # The usage and example string that goes at the end of the help string. examples = Unicode() # A sequence of Configurable subclasses whose config=True attributes will # be exposed at the command line. classes: t.List[t.Type[t.Any]] = [] def _classes_inc_parents(self, classes=None): """Iterate through configurable classes, including configurable parents :param classes: The list of classes to iterate; if not set, uses :attr:`classes`. Children should always be after parents, and each class should only be yielded once. """ if classes is None: classes = self.classes seen = set() for c in classes: # We want to sort parents before children, so we reverse the MRO for parent in reversed(c.mro()): if issubclass(parent, Configurable) and (parent not in seen): seen.add(parent) yield parent # The version string of this application. 
version = Unicode("0.0") # the argv used to initialize the application argv = List() # Whether failing to load config files should prevent startup raise_config_file_errors = Bool(TRAITLETS_APPLICATION_RAISE_CONFIG_FILE_ERROR) # The log level for the application log_level = Enum( (0, 10, 20, 30, 40, 50, "DEBUG", "INFO", "WARN", "ERROR", "CRITICAL"), default_value=logging.WARN, help="Set the log level by value or name.", ).tag(config=True) _log_formatter_cls = LevelFormatter log_datefmt = Unicode( "%Y-%m-%d %H:%M:%S", help="The date format used by logging formatters for %(asctime)s" ).tag(config=True) log_format = Unicode( "[%(name)s]%(highlevel)s %(message)s", help="The Logging format template", ).tag(config=True) def get_default_logging_config(self): """Return the base logging configuration. The default is to log to stderr using a StreamHandler, if no default handler already exists. The log handler level starts at logging.WARN, but this can be adjusted by setting the ``log_level`` attribute. The ``logging_config`` trait is merged into this allowing for finer control of logging. 
""" config: t.Dict[str, t.Any] = { "version": 1, "handlers": { "console": { "class": "logging.StreamHandler", "formatter": "console", "level": logging.getLevelName(self.log_level), "stream": "ext://sys.stderr", }, }, "formatters": { "console": { "class": ( f"{self._log_formatter_cls.__module__}" f".{self._log_formatter_cls.__name__}" ), "format": self.log_format, "datefmt": self.log_datefmt, }, }, "loggers": { self.__class__.__name__: { "level": "DEBUG", "handlers": ["console"], } }, "disable_existing_loggers": False, } if sys.executable and sys.executable.endswith("pythonw.exe"): # disable logging # (this should really go to a file, but file-logging is only # hooked up in parallel applications) del config["handlers"]["loggers"] return config @observe("log_datefmt", "log_format", "log_level", "logging_config") def _observe_logging_change(self, change): # convert log level strings to ints log_level = self.log_level if isinstance(log_level, str): self.log_level = getattr(logging, log_level) self._configure_logging() @observe("log", type="default") def _observe_logging_default(self, change): self._configure_logging() def _configure_logging(self): config = self.get_default_logging_config() nested_update(config, self.logging_config or {}) dictConfig(config) # make a note that we have configured logging self._logging_configured = True @default("log") def _log_default(self): """Start logging for this application.""" log = logging.getLogger(self.__class__.__name__) log.propagate = False _log = log # copied from Logger.hasHandlers() (new in Python 3.2) while _log: if _log.handlers: return log if not _log.propagate: break else: _log = _log.parent # type:ignore[assignment] return log logging_config = Dict( help=""" Configure additional log handlers. The default stderr logs handler is configured by the log_level, log_datefmt and log_format settings. This configuration can be used to configure additional handlers (e.g. 
            to output the log to a file) or for finer control over the default handlers.

            If provided this should be a logging configuration dictionary, for
            more information see:
            https://docs.python.org/3/library/logging.config.html#logging-config-dictschema

            This dictionary is merged with the base logging configuration which
            defines the following:

            * A logging formatter intended for interactive use called
              ``console``.
            * A logging handler that writes to stderr called
              ``console`` which uses the formatter ``console``.
            * A logger with the name of this application set to ``DEBUG``
              level.

            This example adds a new handler that writes to a file:

            .. code-block:: python

               c.Application.logging_configuration = {
                   'handlers': {
                       'file': {
                           'class': 'logging.FileHandler',
                           'level': 'DEBUG',
                           'filename': '<path/to/file>',
                       }
                   },
                   'loggers': {
                       '<application-name>': {
                           'level': 'DEBUG',
                           # NOTE: if you don't list the default "console"
                           # handler here then it will be disabled
                           'handlers': ['console', 'file'],
                       },
                   }
               }
        """,
    ).tag(config=True)

    #: the alias map for configurables
    #: Keys may be strings or tuples for additional options; a single-letter alias is accessed like `-v`.
    #: Values may be "Class.trait" strings or two-tuples: (Class.trait, help-text).
    aliases: t.Dict[str, str] = {"log-level": "Application.log_level"}

    # flags for loading Configurables or store_const style flags
    # flags are loaded from this dict by '--key' flags
    # this must be a dict of two-tuples, the first element being the Config/dict
    # and the second being the help string for the flag
    flags: t.Dict[str, t.Any] = {
        "debug": (
            {
                "Application": {
                    "log_level": logging.DEBUG,
                },
            },
            "Set log-level to debug, for the most verbose logging.",
        ),
        "show-config": (
            {
                "Application": {
                    "show_config": True,
                },
            },
            "Show the application's configuration (human-readable format)",
        ),
        "show-config-json": (
            {
                "Application": {
                    "show_config_json": True,
                },
            },
            "Show the application's configuration (json format)",
        ),
    }

    # subcommands for launching other applications
    # if this is not empty, this will be a parent Application
    # this must be a dict of two-tuples,
    # the first element being the application class/import string
    # and the second being the help string for the subcommand
    subcommands = Dict()
    # parse_command_line will initialize a subapp, if requested
    subapp = Instance("traitlets.config.application.Application", allow_none=True)

    # extra command-line arguments that don't set config values
    extra_args = List(Unicode())

    cli_config = Instance(
        Config,
        (),
        {},
        help="""The subset of our configuration that came from the command-line

        We re-load this configuration after loading config files,
        to ensure that it maintains highest priority.
        """,
    )

    # Paths of config files already merged, in load order (see load_config_file).
    _loaded_config_files = List()

    show_config = Bool(
        help="Instead of starting the Application, dump configuration to stdout"
    ).tag(config=True)

    show_config_json = Bool(
        help="Instead of starting the Application, dump configuration to stdout (as JSON)"
    ).tag(config=True)

    @observe("show_config_json")
    def _show_config_json_changed(self, change):
        # show_config_json implies show_config.
        self.show_config = change.new

    @observe("show_config")
    def _show_config_changed(self, change):
        if change.new:
            # Swap start() for start_show_config(); keep the original around.
            self._save_start = self.start
            self.start = self.start_show_config  # type:ignore[assignment]

    def __init__(self, **kwargs):
        SingletonConfigurable.__init__(self, **kwargs)
        # Ensure my class is in self.classes, so my attributes appear in command line
        # options and config files.
        cls = self.__class__
        if cls not in self.classes:
            if self.classes is cls.classes:
                # class attr, assign instead of insert
                self.classes = [cls] + self.classes
            else:
                self.classes.insert(0, self.__class__)

    @observe("config")
    @observe_compat
    def _config_changed(self, change):
        super()._config_changed(change)
        self.log.debug("Config changed: %r", change.new)

    @catch_config_error
    def initialize(self, argv=None):
        """Do the basic steps to configure me.

        Override in subclasses.
        """
        self.parse_command_line(argv)

    def start(self):
        """Start the app mainloop.

        Override in subclasses.
""" if self.subapp is not None: return self.subapp.start() def start_show_config(self): """start function used when show_config is True""" config = self.config.copy() # exclude show_config flags from displayed config for cls in self.__class__.mro(): if cls.__name__ in config: cls_config = config[cls.__name__] cls_config.pop("show_config", None) cls_config.pop("show_config_json", None) if self.show_config_json: json.dump(config, sys.stdout, indent=1, sort_keys=True, default=repr) # add trailing newline sys.stdout.write("\n") return if self._loaded_config_files: print("Loaded config files:") for f in self._loaded_config_files: print(" " + f) print() for classname in sorted(config): class_config = config[classname] if not class_config: continue print(classname) pformat_kwargs: t.Dict[str, t.Any] = dict(indent=4, compact=True) for traitname in sorted(class_config): value = class_config[traitname] print( " .{} = {}".format( traitname, pprint.pformat(value, **pformat_kwargs), ) ) def print_alias_help(self): """Print the alias parts of the help.""" print("\n".join(self.emit_alias_help())) def emit_alias_help(self): """Yield the lines for alias part of the help.""" if not self.aliases: return classdict = {} for cls in self.classes: # include all parents (up to, but excluding Configurable) in available names for c in cls.mro()[:-3]: classdict[c.__name__] = c for alias, longname in self.aliases.items(): try: if isinstance(longname, tuple): longname, fhelp = longname else: fhelp = None classname, traitname = longname.split(".")[-2:] longname = classname + "." 
                    + traitname
                cls = classdict[classname]

                trait = cls.class_traits(config=True)[traitname]
                fhelp = cls.class_get_trait_help(trait, helptext=fhelp).splitlines()

                if not isinstance(alias, tuple):
                    alias = (alias,)  # type:ignore[assignment]
                alias = sorted(alias, key=len)  # type:ignore[assignment]
                alias = ", ".join(("--%s" if len(m) > 1 else "-%s") % m for m in alias)

                # reformat first line
                fhelp[0] = fhelp[0].replace("--" + longname, alias)
                yield from fhelp
                yield indent("Equivalent to: [--%s]" % longname)
            except Exception as ex:
                self.log.error("Failed collecting help-message for alias %r, due to: %s", alias, ex)
                raise

    def print_flag_help(self):
        """Print the flag part of the help."""
        print("\n".join(self.emit_flag_help()))

    def emit_flag_help(self):
        """Yield the lines for the flag part of the help."""
        if not self.flags:
            return

        for flags, (cfg, fhelp) in self.flags.items():
            try:
                if not isinstance(flags, tuple):
                    flags = (flags,)  # type:ignore[assignment]
                # Short options (single letter) render as -x, long ones as --xyz.
                flags = sorted(flags, key=len)  # type:ignore[assignment]
                flags = ", ".join(("--%s" if len(m) > 1 else "-%s") % m for m in flags)
                yield flags
                yield indent(dedent(fhelp.strip()))
                cfg_list = " ".join(
                    f"--{clname}.{prop}={val}"
                    for clname, props_dict in cfg.items()
                    for prop, val in props_dict.items()
                )
                cfg_txt = "Equivalent to: [%s]" % cfg_list
                yield indent(dedent(cfg_txt))
            except Exception as ex:
                self.log.error("Failed collecting help-message for flag %r, due to: %s", flags, ex)
                raise

    def print_options(self):
        """Print the options part of the help."""
        print("\n".join(self.emit_options_help()))

    def emit_options_help(self):
        """Yield the lines for the options part of the help."""
        if not self.flags and not self.aliases:
            return
        header = "Options"
        yield header
        yield "=" * len(header)
        for p in wrap_paragraphs(self.option_description):
            yield p
            yield ""

        yield from self.emit_flag_help()
        yield from self.emit_alias_help()
        yield ""

    def print_subcommands(self):
        """Print the subcommand part of the help."""
print("\n".join(self.emit_subcommands_help())) def emit_subcommands_help(self): """Yield the lines for the subcommand part of the help.""" if not self.subcommands: return header = "Subcommands" yield header yield "=" * len(header) for p in wrap_paragraphs(self.subcommand_description.format(app=self.name)): yield p yield "" for subc, (_, help) in self.subcommands.items(): yield subc if help: yield indent(dedent(help.strip())) yield "" def emit_help_epilogue(self, classes): """Yield the very bottom lines of the help message. If classes=False (the default), print `--help-all` msg. """ if not classes: yield "To see all available configurables, use `--help-all`." yield "" def print_help(self, classes=False): """Print the help for each Configurable class in self.classes. If classes=False (the default), only flags and aliases are printed. """ print("\n".join(self.emit_help(classes=classes))) def emit_help(self, classes=False): """Yield the help-lines for each Configurable class in self.classes. If classes=False (the default), only flags and aliases are printed. """ yield from self.emit_description() yield from self.emit_subcommands_help() yield from self.emit_options_help() if classes: help_classes = self._classes_with_config_traits() if help_classes: yield "Class options" yield "=============" for p in wrap_paragraphs(self.keyvalue_description): yield p yield "" for cls in help_classes: yield cls.class_get_help() yield "" yield from self.emit_examples() yield from self.emit_help_epilogue(classes) def document_config_options(self): """Generate rST format documentation for the config options this application Returns a multiline string. 
""" return "\n".join(c.class_config_rst_doc() for c in self._classes_inc_parents()) def print_description(self): """Print the application description.""" print("\n".join(self.emit_description())) def emit_description(self): """Yield lines with the application description.""" for p in wrap_paragraphs(self.description or self.__doc__ or ""): yield p yield "" def print_examples(self): """Print usage and examples (see `emit_examples()`).""" print("\n".join(self.emit_examples())) def emit_examples(self): """Yield lines with the usage and examples. This usage string goes at the end of the command line help string and should contain examples of the application's usage. """ if self.examples: yield "Examples" yield "--------" yield "" yield indent(dedent(self.examples.strip())) yield "" def print_version(self): """Print the version string.""" print(self.version) @catch_config_error def initialize_subcommand(self, subc, argv=None): """Initialize a subcommand with argv.""" subapp, _ = self.subcommands.get(subc) if isinstance(subapp, str): subapp = import_item(subapp) # Cannot issubclass() on a non-type (SOhttp://stackoverflow.com/questions/8692430) if isinstance(subapp, type) and issubclass(subapp, Application): # Clear existing instances before... self.__class__.clear_instance() # instantiating subapp... self.subapp = subapp.instance(parent=self) elif callable(subapp): # or ask factory to create it... self.subapp = subapp(self) # type:ignore[call-arg] else: raise AssertionError("Invalid mappings for subcommand '%s'!" % subc) # ... and finally initialize subapp. self.subapp.initialize(argv) def flatten_flags(self): """Flatten flags and aliases for loaders, so cl-args override as expected. This prevents issues such as an alias pointing to InteractiveShell, but a config file setting the same trait in TerminalInteraciveShell getting inappropriate priority over the command-line arg. Also, loaders expect ``(key: longname)`` and not ````key: (longname, help)`` items. 
Only aliases with exactly one descendent in the class list will be promoted. """ # build a tree of classes in our list that inherit from a particular # it will be a dict by parent classname of classes in our list # that are descendents mro_tree = defaultdict(list) for cls in self.classes: clsname = cls.__name__ for parent in cls.mro()[1:-3]: # exclude cls itself and Configurable,HasTraits,object mro_tree[parent.__name__].append(clsname) # flatten aliases, which have the form: # { 'alias' : 'Class.trait' } aliases: t.Dict[str, str] = {} for alias, longname in self.aliases.items(): if isinstance(longname, tuple): longname, _ = longname cls, trait = longname.split(".", 1) # type:ignore[assignment] children = mro_tree[cls] # type:ignore[index] if len(children) == 1: # exactly one descendent, promote alias cls = children[0] # type:ignore[assignment] if not isinstance(aliases, tuple): alias = (alias,) # type:ignore[assignment] for al in alias: aliases[al] = ".".join([cls, trait]) # type:ignore[list-item] # flatten flags, which are of the form: # { 'key' : ({'Cls' : {'trait' : value}}, 'help')} flags = {} for key, (flagdict, help) in self.flags.items(): newflag: t.Dict[t.Any, t.Any] = {} for cls, subdict in flagdict.items(): children = mro_tree[cls] # type:ignore[index] # exactly one descendent, promote flag section if len(children) == 1: cls = children[0] # type:ignore[assignment] if cls in newflag: newflag[cls].update(subdict) else: newflag[cls] = subdict if not isinstance(key, tuple): key = (key,) # type:ignore[assignment] for k in key: flags[k] = (newflag, help) return flags, aliases def _create_loader(self, argv, aliases, flags, classes): return KVArgParseConfigLoader(argv, aliases, flags, classes=classes, log=self.log) @catch_config_error def parse_command_line(self, argv=None): """Parse the command line arguments.""" assert not isinstance(argv, str) argv = sys.argv[1:] if argv is None else argv self.argv = [cast_unicode(arg) for arg in argv] if argv and argv[0] == 
"help": # turn `ipython help notebook` into `ipython notebook -h` argv = argv[1:] + ["-h"] if self.subcommands and len(argv) > 0: # we have subcommands, and one may have been specified subc, subargv = argv[0], argv[1:] if re.match(r"^\w(\-?\w)*$", subc) and subc in self.subcommands: # it's a subcommand, and *not* a flag or class parameter return self.initialize_subcommand(subc, subargv) # Arguments after a '--' argument are for the script IPython may be # about to run, not IPython iteslf. For arguments parsed here (help and # version), we want to only search the arguments up to the first # occurrence of '--', which we're calling interpreted_argv. try: interpreted_argv = argv[: argv.index("--")] except ValueError: interpreted_argv = argv if any(x in interpreted_argv for x in ("-h", "--help-all", "--help")): self.print_help("--help-all" in interpreted_argv) self.exit(0) if "--version" in interpreted_argv or "-V" in interpreted_argv: self.print_version() self.exit(0) # flatten flags&aliases, so cl-args get appropriate priority: flags, aliases = self.flatten_flags() classes = tuple(self._classes_with_config_traits()) loader = self._create_loader(argv, aliases, flags, classes=classes) try: self.cli_config = deepcopy(loader.load_config()) except SystemExit: # traitlets 5: no longer print help output on error # help output is huge, and comes after the error raise self.update_config(self.cli_config) # store unparsed args in extra_args self.extra_args = loader.extra_args @classmethod def _load_config_files(cls, basefilename, path=None, log=None, raise_config_file_errors=False): """Load config files (py,json) by filename and path. yield each config object in turn. 
""" if not isinstance(path, list): path = [path] for path in path[::-1]: # path list is in descending priority order, so load files backwards: pyloader = cls.python_config_loader_class(basefilename + ".py", path=path, log=log) if log: log.debug("Looking for %s in %s", basefilename, path or os.getcwd()) jsonloader = cls.json_config_loader_class(basefilename + ".json", path=path, log=log) loaded: t.List[t.Any] = [] filenames: t.List[str] = [] for loader in [pyloader, jsonloader]: config = None try: config = loader.load_config() except ConfigFileNotFound: pass except Exception: # try to get the full filename, but it will be empty in the # unlikely event that the error raised before filefind finished filename = loader.full_filename or basefilename # problem while running the file if raise_config_file_errors: raise if log: log.error("Exception while loading config file %s", filename, exc_info=True) else: if log: log.debug("Loaded config file: %s", loader.full_filename) if config: for filename, earlier_config in zip(filenames, loaded): collisions = earlier_config.collisions(config) if collisions and log: log.warning( "Collisions detected in {0} and {1} config files." 
" {1} has higher priority: {2}".format( filename, loader.full_filename, json.dumps(collisions, indent=2), ) ) yield (config, loader.full_filename) loaded.append(config) filenames.append(loader.full_filename) @property def loaded_config_files(self): """Currently loaded configuration files""" return self._loaded_config_files[:] @catch_config_error def load_config_file(self, filename, path=None): """Load config files by filename and path.""" filename, ext = os.path.splitext(filename) new_config = Config() for (config, filename) in self._load_config_files( filename, path=path, log=self.log, raise_config_file_errors=self.raise_config_file_errors, ): new_config.merge(config) if ( filename not in self._loaded_config_files ): # only add to list of loaded files if not previously loaded self._loaded_config_files.append(filename) # add self.cli_config to preserve CLI config priority new_config.merge(self.cli_config) self.update_config(new_config) def _classes_with_config_traits(self, classes=None): """ Yields only classes with configurable traits, and their subclasses. :param classes: The list of classes to iterate; if not set, uses :attr:`classes`. Thus, produced sample config-file will contain all classes on which a trait-value may be overridden: - either on the class owning the trait, - or on its subclasses, even if those subclasses do not define any traits themselves. """ if classes is None: classes = self.classes cls_to_config = OrderedDict( (cls, bool(cls.class_own_traits(config=True))) for cls in self._classes_inc_parents(classes) ) def is_any_parent_included(cls): return any(b in cls_to_config and cls_to_config[b] for b in cls.__bases__) # Mark "empty" classes for inclusion if their parents own-traits, # and loop until no more classes gets marked. 
# while True: to_incl_orig = cls_to_config.copy() cls_to_config = OrderedDict( (cls, inc_yes or is_any_parent_included(cls)) for cls, inc_yes in cls_to_config.items() ) if cls_to_config == to_incl_orig: break for cl, inc_yes in cls_to_config.items(): if inc_yes: yield cl def generate_config_file(self, classes=None): """generate default config file from Configurables""" lines = ["# Configuration file for %s." % self.name] lines.append("") classes = self.classes if classes is None else classes config_classes = list(self._classes_with_config_traits(classes)) for cls in config_classes: lines.append(cls.class_config_section(config_classes)) return "\n".join(lines) def close_handlers(self): if getattr(self, "_logging_configured", False): # don't attempt to close handlers unless they have been opened # (note accessing self.log.handlers will create handlers if they # have not yet been initialised) for handler in self.log.handlers: with suppress(Exception): handler.close() self._logging_configured = False def exit(self, exit_status=0): self.log.debug("Exiting application: %s" % self.name) self.close_handlers() sys.exit(exit_status) def __del__(self): self.close_handlers() @classmethod def launch_instance(cls, argv=None, **kwargs): """Launch a global instance of this Application If a global instance already exists, this reinitializes and starts it """ app = cls.instance(**kwargs) app.initialize(argv) app.start()