def get(self, nsid: Union[str, Nsid]) -> NamespaceNodeBase:
    """
    Description:
        return a node object specified by NSID
    Input:
        nsid: the namespace id of the node to return
    Output:
        the NamespaceNodeBase object found at that nsid
    Raises:
        NamespaceLookupError: when a segment of the nsid does not exist
        NamespaceInternalError: when the nsid segments are exhausted before
            the target node is reached
    """
    log = LoggerAdapter(logger, dict(name_ext=f"{self.__class__.__name__}.get"))
    self._validate_namespace_nsid_head(nsid)
    _nsid_ = Nsid(nsid)
    current_node = self.root
    nsid_segments = list_nsid_segments(nsid)[1:]  #- skip initial root segment
    n = 0
    #- walk one attribute per segment until we land on the target nsid
    while current_node.nsid != _nsid_:
        log.debug(f"target {_nsid_=} != {current_node.nsid=}")
        try:
            nsid_segment = nsid_segments[n]
        except IndexError as err:
            raise NamespaceInternalError(
                f"while looking for nsid \"{_nsid_}\", ran out of nsid_segments: {nsid_segments} at index {n}"
            ) from err
        try:
            current_node = getattr(current_node, nsid_segment)
            if not isinstance(current_node, NamespaceNodeBase):
                warn(
                    "Rogue node type detected in the namespace. Will most likely cause errors."
                )
        except AttributeError as err:
            #- BUGFIX: chain the underlying AttributeError (was raised bare,
            #- losing the original traceback context)
            raise NamespaceLookupError(
                f"{current_node} has no attribute named '{nsid_segment}'") from err
        n += 1
    log.debug(f"found {_nsid_=} == {current_node.nsid=}")
    return current_node
def parse(self, dictConfig=None):
    '''
    Description:
        parse a dictConfig mapping into a collection of namespace root nodes
    Input:
        dictConfig: the dictConfig that initializes the namespace
    Output:
        list of the new namespace root nodes created from the top-level keys
        (empty list when dictConfig is None)
    '''
    self._ns_roots = list()
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceConfigParser.parse'})
    log.debug("Beginning dictConfig parsing...")

    ns_collection = list()
    #- BUGFIX: the default of None previously raised AttributeError on
    #- .keys(); treat "no config" as "nothing to parse"
    if dictConfig is None:
        log.debug("No dictConfig given; returning empty collection")
        return ns_collection

    for key in dictConfig.keys():
        if self.prefix:
            nsid = '.'.join([self.prefix, key])
        else:
            nsid = key
        log.debug('creating new Namespace Root: {}'.format(nsid))
        cur_ns = self.new_node(nsid)
        log.debug('appending {} to ns_collection'.format(cur_ns))
        ns_collection.append(cur_ns)
        #- BUGFIX: stale message referenced _parse_dictConfig_sub, but
        #- parse_submap is what is actually called
        log.debug('Calling parse_submap')
        self.parse_submap(dictConfig[key], cur_ns)

    log.debug('returning {}'.format(ns_collection))
    return ns_collection
def _lookup_symbolic_ref(self, ref, follow_symrefs=True):
    """
    Description:
        lookup a value starting from NSROOT, instead of a value in this
        namespace node. (if nsroot is not set, we lookup from this node.)
    Input:
        ref: the symbolic reference to lookup
        follow_symrefs: whether to keep dereferencing when a lookup yields
            another symbolic reference (docstring previously described a
            nonexistent 'follow' parameter)
    Output:
        the resolved value; when follow_symrefs is True this is guaranteed
        not to be a symbolic reference itself
    """
    log = LoggerAdapter(logger, {'name_ext': 'NamspaceNode._lookup_symbolic_ref'})
    #- resolve relative to nsroot when one is configured, else to self
    nsroot = self if self._nsroot is None else self._nsroot

    log.debug("nsid ref: {}".format(ref))
    #- strip the prefix
    nsid = get_nsid_from_ref(ref)
    ref = nsroot._lookup(nsid)
    if follow_symrefs:
        #- chase chained references until a concrete value is found
        while is_nsid_ref(ref):
            log.debug("nsid ref: {}".format(ref))
            nsid = get_nsid_from_ref(ref)
            ref = nsroot._lookup(nsid)
    #- ref no longer an nsid ref
    return ref
def __init__(self, collection_specs=None, group_specs=None, object_specs=None):
    """
    Initialize the Render object: set up the spec namespace and parse the
    default object spec file.

    Input:
        collection_specs: mapping to lookup collection model specs (unimplemented)
        group_specs: mapping to lookup group model specifications (unimplemented)
        object_specs: name to object spec mapping to look up the object model
            specifications
    """
    log = LoggerAdapter(logger, {'name_ext': 'Render.__init__'})
    log.debug("Entering")
    # NOTE(review): stored here but a separate self.object_spec_map (no
    # underscore) is assigned below from the parsed spec file — confirm
    # which attribute downstream code actually reads
    self._object_spec_map = object_specs

    #- set up Namespace for specs: '.' root with a spec.object subtree
    self.nsroot = NamespaceNode('.', is_nsroot=True)
    self.spec = self.nsroot._add_child('spec')
    self.spec._add_child('object')

    self._collection_spec_map = collection_specs
    self._group_spec_map = group_specs

    # load and parse the default object spec file into the spec namespace
    object_spec_config = load_yaml_file(filename=defaults.object_spec_file)
    object_specs_parser = ObjectSpecConfigParser(nsroot=self.nsroot)
    self.object_spec_map = object_specs_parser.parse(object_spec_config)
    log.debug('list(self.spec._all(nsids=True)):{}'.format(
        list(self.spec._all(nsids=True))))
    # populate self.spec.object from the spec file (re-parses the same file)
    self.init_object_spec_ns()
def __getattr__(self, attr):
    """Delegate unknown attribute access to a collective evaluation.

    Builds an attribute-access expression string (".<attr>") and hands it
    to collective_eval, returning whatever that produces.
    """
    log = LoggerAdapter(logger, {'name_ext': 'FilteredCollection.__getattr__'})
    log.debug("FilteredCollection.__getattr('{}')".format(attr))
    expression = '.{}'.format(attr)
    log.debug("calling collective_eval({})".format(expression))
    return self.collective_eval(expression)
def __getitem__(self, key):
    """
    Description:
        Go through self and the sequence of maps to find the first match for
        the given key
    Input:
        key: key for the item to get
    Output:
        the first value found for key; otherwise the result of __missing__
    """
    log = LoggerAdapter(logger, {'name_ext': 'NsidChainMap.__getitem__'})
    value = self._LOOKUP_FAIL_CANARY
    try:
        #- local dict always wins
        return self.data[key]
    except KeyError:
        log.debug("{} not found in local dict".format(key))

    for m_nsid in self.map_nsids:
        try:
            mapping = self.nsroot._lookup(m_nsid)
        except NamespaceLookupError:
            log.warning('Unable to lookup map: {}'.format(m_nsid))
            #- BUGFIX: previously fell through after a failed lookup and
            #- indexed a stale/undefined 'map' binding; skip this map
            continue
        try:
            value = mapping[key]
            break
        except KeyError:
            log.debug('KeyError in {}. Trying next...'.format(m_nsid))
            continue

    if value == self._LOOKUP_FAIL_CANARY:
        #- BUGFIX: follow the dict __missing__ protocol and return its
        #- result (it was previously called but its value discarded)
        return self.__missing__(key)
    return value
def _parse_meta_factory_function_dynamic_bases(self, base_names: list) -> tuple:
    """
    Description:
        takes the list of strings of class names and turns it into a tuple of
        type objects required before passing the bases to `type` builtin
    Input:
        base_names: a list of strings of base class names
    Output:
        tuple of types created from the names
    Raises:
        ValueError: when a named class does not exist in its module
    TODO:
        this was straight copied from _parse_meta_factory_function_static.
        Refactor into a shared method call that can capture the similar logic
        for importing the module and getting the symbol as an object
    """
    log = LoggerAdapter(
        logger,
        dict(
            name_ext=
            f'{self.__class__.__name__}._parse_meta_factory_function_dynamic_bases'
        ))

    bases = list()  # will be returned value
    for basename in base_names:
        module = None  #- the python module that has the class
        try:
            module_name = '.'.join(basename.split('.')[0:-1])
            symbol_name = basename.split('.')[-1]
            module = import_module(module_name)
        except ValueError:
            #- the import_module call failed
            #- we have a name, but it might not have a dot at all,
            #- which would then try to import the empty string and
            #- fail with a ValueError
            #- try to use thewired as the base import lib name
            #- BUGFIX: this message was missing its f-prefix, so the module
            #- name was never interpolated into the log line
            log.debug(
                f"value error importing: \"{module_name}\". Defaulting to 'thewired'."
            )
            module = import_module("thewired")
        finally:
            if module:
                try:
                    cls = getattr(module, symbol_name)
                except AttributeError as err:
                    log.debug(
                        f"specified class ({symbol_name}) does not exist in specified module ({module_name})!"
                    )
                    #- BUGFIX: balanced the quoting in the error message
                    raise ValueError(
                        f"\"{symbol_name}\" does not exist in {module_name}!"
                    ) from err
                else:
                    bases.append(cls)
    return tuple(bases)
def remove(self, nsid: Union[str, Nsid]) -> NamespaceNodeBase:
    """Remove and return the node at nsid, resolved against this view's prefix.

    Translates the view-relative nsid into a fully-prefixed one and delegates
    to the wrapped namespace.
    """
    name_ext = f"{self.__class__.__name__}.remove: {self.prefix=}"
    log = LoggerAdapter(logger, dict(name_ext=name_ext))
    real_nsid = self.prefix + nsid
    log.debug(f"removing: {real_nsid=}")
    return self.ns.remove(real_nsid)
def __getitem__(self, key):
    """Return the provider registered for key.

    Raises:
        ProviderMapLookupError: when no provider is mapped for key
    """
    log = LoggerAdapter(logger, {'name_ext': 'ProviderMap.__getitem__'})
    val = self.data.get(key, FAIL_CANARY)
    log.debug('data.get({}) returned: {}'.format(key, val))
    if val != FAIL_CANARY:
        return val
    raise ProviderMapLookupError('No provider for {}'.format(key))
def __iter__(self):
    """Return the default iterator for this node: an unfiltered shallow
    iteration over its items."""
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode.__iter__'})
    log.debug("Default iterator invoked on {}".format(self._nsid))
    return self._shallowiterator()
def _shallowiterator(self):
    """Build an iterator over every direct item in _ns_items (no recursion)."""
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode._shallowiterator'})
    log.debug("Shallow iterator invoked on {}".format(self._nsid))
    items = self._all(nsids=False)
    return iter(items)
def __init__(self, nsid, alias_nsid, ns_items=None):
    """Create an alias node living at nsid that refers to alias_nsid."""
    log = LoggerAdapter(logger, {'name_ext': 'AliasNode.__init__'})
    log.debug("Initializing Alias Node: {} --> {}".format(nsid, alias_nsid))
    super().__init__(
        namespace_id=nsid,
        provider_map=None,
        ghost=None,
        ns_items=ns_items)
    self._alias_nsid = alias_nsid
    self._ns_items = ns_items
def add(self, nsid: Union[str, Nsid], *args, **kwargs) -> List[NamespaceNodeBase]:
    """Add node(s) at nsid, resolved against this view's prefix.

    Remaining positional/keyword arguments are forwarded unchanged to the
    wrapped namespace's add().
    """
    name_ext = f"{self.__class__.__name__}.add: {self.prefix=}"
    log = LoggerAdapter(logger, dict(name_ext=name_ext))
    real_nsid = self.prefix + nsid
    log.debug(f"adding {real_nsid=}")
    return self.ns.add(real_nsid, *args, **kwargs)
def get(self, nsid: Union[str, Nsid]) -> NamespaceNodeBase:
    """Fetch the node at nsid, resolved against this view's prefix.

    Asking for the bare delineator yields the view's root, i.e. the prefix
    node itself.
    """
    name_ext = f"{self.__class__.__name__}.get: {self.prefix=}"
    log = LoggerAdapter(logger, dict(name_ext=name_ext))
    real_nsid = self.prefix if nsid == self.delineator else self.prefix + nsid
    log.debug(f"getting {real_nsid=}")
    return self.ns.get(real_nsid)
async def putflag_test(task: PutflagCheckerTaskMessage, session_a: AsyncClient,
                       session_b: AsyncClient, db: ChainDB,
                       logger: LoggerAdapter) -> None:
    """Deposit the flag: register two users, message the flag from A to B,
    then persist B's credentials for the later retrieval check."""
    await register_user(session_a)
    logger.debug("registered user for session_a")

    creds_b = await register_user(session_b)
    logger.debug("registered user for session_b")

    (username_b, password_b) = creds_b
    await send_message(session_a, username_b, task.flag)
    await db.set("credentials", (username_b, password_b))
def __call__(self, *args, **kwargs):
    """Invoke this node through its '__call__' provider.

    The provider's return value is cached on self._ghost and returned.

    Raises:
        TypeError: when no provider map is configured for this node
    """
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode.__call__'})
    log.debug("args: {}".format(args))
    log.debug("kwargs: {}".format(kwargs))
    #- guard clause: without a provider map this node is not callable
    if self._provider_map is None:
        raise TypeError('{} object is not callable'.format(
            self.__class__.__name__))
    provider = self._provider_map.get_provider('__call__')
    self._ghost = provider(*args, **kwargs)
    return self._ghost
def __setitem__(self, key, value):
    """Store value under key in the local dict only (chained maps are
    never written to)."""
    log = LoggerAdapter(logger, {'name_ext': 'NsidChainMap.__setitem__'})
    log.debug("setting local dict {} to {}".format(key, value))
    self.data[key] = value
def init_object_spec_ns(self, file=defaults.object_spec_file, root=None):
    """Parse the object spec config file into the self.spec.object namespace.

    Input:
        file: path of the object spec yaml file to load
        root: node to attach the parsed roots under (defaults to
            self.spec.object)
    """
    log = LoggerAdapter(logger, {'name_ext': 'Render.init_object_spec_ns'})
    if root is None:
        root = self.spec.object
    dictConfig = load_yaml_file(filename=file)
    parser = ObjectSpecConfigParser(nsroot=self.spec)
    ns_roots = parser.parse(dictConfig)
    log.debug(f"object spec ns roots: {ns_roots}")
    for ns_x in ns_roots:
        root._add_ns(ns_x)
def __init__(self, source_object, spec, colors=None, prologue=None):
    """Build an object model from a spec.

    source_object: the object that is being modeled
    spec: the ObjectModelSpec object that is used to build this model
    colors: list of colors to use for attributes (overrides spec colors)
    prologue: ColoredText type for things to place in the output stream
        before the object rendering output
    """
    log = LoggerAdapter(logger, {'name_ext': 'ObjectModel.__init__'})
    log.debug("Entering")
    self.source_object = source_object
    self.attribute_models = self.make_attribute_models_from_spec(spec)

    #- spec colors are applied first; explicit parameter colors win
    spec_colors = self.get_colors_from_spec(spec)
    self.set_colors(spec_colors)
    if colors:
        log.debug("Overriding spec colors with parameter colors")
        self.set_colors(colors)

    self.delimiter = self.get_delimiter_from_spec(spec)
    self.delimiter_colors = self.get_delimiter_colors_from_spec(spec)

    #- stuff that displays prepended to the object display
    log.debug("setting prologue: '{}'".format(prologue))
    self.prologue = prologue
    log.debug("Exiting")
def get_render_data(self):
    """Return this attribute as an AttributeRenderDatum built from three
    ColoredText pieces: prologue, colored body text, epilogue."""
    name_ext = '{}.get_render_data'.format(self.__class__.__name__)
    log = LoggerAdapter(logger, {'name_ext': name_ext})
    body = ColoredText(self.text, self.color)
    ard = AttributeRenderDatum(self.prologue, body, self.epilogue)
    log.debug("returning: {}".format(ard))
    return ard
def build_formatter_callable(self):
    """
    Description:
        Encapsulate some logic needed to create the callable used as the
        Attribute Formatter callable.
    Output:
        the render-method callable, or None when a named render method can
        not be resolved
    """
    log = LoggerAdapter(
        logger, {'name_ext': 'AttributeModel.build_formatter_callable'})
    log.debug("entering: {}".format(self))
    if self.uses_named_render_method():
        try:
            log.debug(
                'Getting reference to formatter callable from formatter name')
            #- TODO: instantiate / get references to named arguments
            #- NOTE(review): eval() on a configured name is dangerous if
            #- render_method_name can ever come from untrusted input
            render_method = functools.partial(
                eval(self.render_method_name), source=self.source_object)
        except NameError:
            #- BUGFIX: the message had a '{}' placeholder but no .format()
            #- argument, so the offending name was never logged
            log.error(
                "render_method_name: '{}' seems invalid. Skipping render_method"
                .format(self.render_method_name))
            render_method = None
    else:
        log.debug(
            'Getting reference to formatter callable from explicit callable')
        render_method = self.render_method
    log.debug("exiting")
    return render_method
def get_params(cls, map, *args, **kwargs):
    """
    Description:
        get the raw parameters from a formatted params map
    Input:
        map: the params map
        *args: ignored
        **kwargs: overlay parameters
    Output:
        a 2-tuple of (method_name, params dict)
    """
    log = LoggerAdapter(logger, {'name_ext': 'ParametizedCall.get_params'})
    log.debug("Entered")
    log.debug("kwargs: {}".format(str(kwargs)))
    params_marker = cls._param_dict_mark_key
    # deep-copy so the pops below can't mutate the caller's map
    params = copy.deepcopy(map[params_marker])
    log.debug("raw params: {}".format(params))
    # NOTE(review): raises KeyError when 'defaults' or 'method_name' is
    # absent — presumably guaranteed by the params-map format; confirm
    method_name = params['defaults'].pop('method_name')
    # kwargs overlay on top of the defaults
    param_chain = collections.ChainMap(kwargs, params['defaults'])
    # popping from kwargs also removes '_params' from param_chain, because
    # ChainMap holds a live reference to that same dict
    param_set_name = kwargs.pop('_params', None)
    if param_set_name:
        # a named parameter set slots in between kwargs and the defaults
        param_chain.maps.insert(1, params[param_set_name])
    log.debug("Exiting")
    return (method_name, param_chain)
def _lookup(self, namespace_id, follow_symrefs=True):
    """
    Description:
        Get an object in the namespace by its namespace id
    Input:
        namespace_id: id of the object to retrieve
        follow_symrefs: whether or not to try to perform a deep lookup
            deep lookups can contain other NSIDs which will be looked up in
            turn until a final value is found and returned.
            For this feature to be enabled, this node's ._nsroot attribute
            must also be a valid NamespaceNode-like object that supports a
            lookup() method that will be passed an NSID.
    Output:
        item if found, else NamespaceLookupError raised
    """
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode._lookup'})
    log.debug('[{}] lookup([{}])'.format(self._nsid, namespace_id))
    obj = None
    # a symbolic reference is resolved by the dedicated symref resolver
    if follow_symrefs and is_nsid_ref(namespace_id):
        value = self._lookup_symbolic_ref(namespace_id)
        return value
    else:
        #- split the NSID by path seperator (normally a dot)
        path = self._name_to_path(namespace_id)

        #- fully qualified NSID or not?
        if self._nsroot and path[0] == self._nsroot._nsid:
            #- lookup fully qualified NSIDs using the root node
            next_nsid = '.'.join(path[1:])
            # NOTE(review): uses the public lookup() here while relative
            # lookups below use _lookup/getattr — confirm both paths
            # resolve identically
            return self._nsroot.lookup(next_nsid, follow_symrefs=follow_symrefs)
        else:
            #- lookup relative NSIDs iteratively from current
            obj = self
            for name in path:
                try:
                    obj = getattr(obj, name)
                    # an intermediate segment may itself be a symref
                    if follow_symrefs and is_nsid_ref(obj):
                        return self._lookup_symbolic_ref(obj)
                except AttributeError as err:
                    log.error(
                        'thewired Failed to find value for [{}] in [{}]'.
                        format(namespace_id, self._nsid))
                    raise NamespaceLookupError("{}.{}".format(
                        self._nsid, namespace_id)) from err
            return obj
def _list_leaves(self, nsids=False, cur_nsid=None):
    """
    Description:
        return a list of all the leaf nodes
    Input:
        nsids:
            - True: return a list of pairs of (nsid, leaf_node)
            - False: return a list of leaf_node's
        cur_nsid: nsid of the item currently being descended into (supplied
            by the recursive calls; None at the top level)
    Output:
        flat list of leaves (or (nsid, leaf) pairs) under this node
    """
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode._list_leaves'})
    log.debug("invoked on: {} | cur_nsid: {} ".format(
        self._nsid, cur_nsid))
    leaves = list()
    for nsid, ns_item in self._all(nsids=True):
        try:
            #- if an object has this method, we don't consider it a leaf itself
            next_leaves = ns_item._list_leaves(nsids=nsids, cur_nsid=nsid)
            log.debug("extending with {}".format(next_leaves))
            leaves += next_leaves
        except (TypeError, AttributeError):
            # no compatible _list_leaves on the item: treat it as a leaf
            # (exception-driven duck typing, EAFP)
            log.debug("leaf found: {}".format(ns_item))
            if nsids:
                leaf = (nsid, ns_item)
            else:
                leaf = ns_item
            leaves.append(leaf)
    log.debug("generated leaves: {}".format(leaves))
    return leaves
def __getattr__(self, attr):
    """
    Description:
        called when attribute lookup fails. used to implement semantics for
        provided attributes.

        If Python calls this method we will use this class' provider_map to
        get the provider for this attribute access and then call the
        provider and return the provider's return value as the value of the
        attribute
    Input:
        attr: the name of the attribute that wasn't found via the normal
            Python attribute lookup mechanisms.
    Output:
        Output of the provider returned by the provider factory of this class.
    Raises:
        AttributeError: when there is no provider map at all, or no mapped
            provider for attr
    """
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode.__getattr__'})
    log.debug('{}.__getattr__({})'.format(self._nsid, attr))
    if self._provider_map:
        try:
            if callable(self._provider_map[attr]):
                self._ghost = self._provider_map[attr]()
            #- TODO: allow a string to be set. Use more specific logic
            #- TODO: to determine if this is an NSID
            #- BUGFIX: was bare 'provider_map[attr]' which raised NameError
            elif isinstance(self._provider_map[attr], str):
                #- treat as NSID
                try:
                    provider = self.provider_ns._lookup(
                        self._provider_map[attr])
                    self._ghost = provider()
                except NamespaceLookupError as err:
                    raise ProviderError from err
        except TypeError as err:
            log.error(f'{self._nsid}.{attr}: provider not callable')
        except ProviderError as err:
            log.error('{}: provider error: {}'.format(self._nsid, err))
        except ProviderMapLookupError as err:
            log.error('No mapped provider for {}.{}'.format(
                self._nsid, attr))
            raise AttributeError from err
        log.debug('{}.{} provider returned: {}'.format(
            self._nsid, attr, self._ghost))
    else:
        raise AttributeError('{} object has no provider_map and no attribute \'{}\''.format(
            self.__class__.__name__, attr))
    return self._ghost
def _create_node_factory_param_object(
        self, dictConfig: dict) -> Union[object, None]:
    """
    Description:
        instantiates objects defined inside of node factory init function
        parameters these objects are needed in order to pass in as params to
        the node factory function
    Input:
        dictConfig: the config we are parsing
    Output:
        a parameter object instantiated as specified in the config via the
        meta keys
    """
    log = LoggerAdapter(
        logger,
        dict(
            name_ext=
            f'{self.__class__.__name__}._create_node_factory_param_objects'
        ))
    log.debug(f"create_node_factory_param_object: {dictConfig=}")
    if not dictConfig:
        return None
    try:
        keys = dictConfig.keys()
    except AttributeError:
        #- no .keys(), dictConfig is no longer a mapping type
        return None

    #- parse out the function from __class__
    factory_function = self._parse_meta_factory_function(dictConfig)

    #- parse the parameters and instantiate the objects
    init_params = dict()
    try:
        init_param_names = dictConfig['__init__'].keys()
        for init_param_name in init_param_names:
            try:
                init_param_keys = dictConfig['__init__'][
                    init_param_name].keys()
                #- this init param itself requires an init param
                #- TODO
                pass
            except AttributeError:
                #- this init param is not a mapping type
                init_params = dictConfig['__init__'][init_param_name]
    except KeyError:
        #- no '__init__' key
        return dict()
    # NOTE(review): on the success path nothing is returned here, so the
    # method yields None even after computing factory_function and
    # init_params — this looks unfinished (see TODO above); confirm the
    # intended return value before relying on this method
def get_subnodes(self, start_node_nsid):
    """Depth-first generator over every NamespaceNodeBase found below the
    node at start_node_nsid (children discovered via attribute inspection)."""
    name_ext = f"{self.__class__.__name__}.get_subnodes: {self.prefix=}"
    log = LoggerAdapter(logger, dict(name_ext=name_ext))
    log.debug(f"{start_node_nsid=}")
    start_node = self.get(start_node_nsid)
    for child_name in dir(start_node):
        child = getattr(start_node, child_name)
        if not isinstance(child, NamespaceNodeBase):
            continue
        yield child
        #- recurse using the child's nsid, re-relativized to this view
        next_nsid = '.' + self.strip_prefix(str(child.nsid))
        log.debug(f"{next_nsid=}")
        yield from self.get_subnodes(next_nsid)
def _sanitize_nsid(nsid):
    """
    Description:
        take a proposed NSID and remove consecutive dots
    Input:
        nsid: unsanitized nsid string
    Output:
        standard-conformant nsid string
    """
    logname = {'name_ext': 'NamespaceNode._sanitize_nsid'}
    log = LoggerAdapter(logger, logname)
    #- BUGFIX: raw string for the regex; "\.\.+" in a plain string relies on
    #- invalid escape sequences (SyntaxWarning on modern CPython)
    sanitized_nsid = re.sub(r"\.\.+", ".", nsid)
    if sanitized_nsid != nsid:
        log.debug("Sanitized NSID: {} ---> {}".format(
            nsid, sanitized_nsid))
    return sanitized_nsid
def _provider_map(self, mapping):
    """Set this node's provider map.

    Restrictions:
        * All values for the provider map must inherit from Provider ABC
    Raises:
        ValueError if any value is not an instance of Provider ABC
    """
    log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode.provider_map setter'})
    log.debug('setting providers for NamespaceNode {}'.format(self._nsid))
    provider_map = ProviderMap(mapping=mapping)
    self.__provider_map = provider_map
def parse(self, dictConfig: dict, prefix: str = '') -> Union[Namespace, None]:
    """
    Description:
        parse a configDict into a Namespace object
    Input:
        dictConfig - the configuration file parsed into a dictionary
        prefix - the rolling prefix for this parse, used to collect when
            recursively called
    Output:
        a namespace object representing the nodes specifed in the dictConfig
        object (None when dictConfig is not a mapping type)
    """
    log = LoggerAdapter(logger, dict(name_ext=f'{self.__class__.__name__}.parse'))
    log.debug(f"enter: {prefix=} {dictConfig=}")
    ns = self.ns
    try:
        dictConfig.keys()
    except (AttributeError, TypeError):
        # not a mapping: nothing to parse at this level
        return None

    #- create namespace as dictConfig describes
    # iterate a copy: _input_mutator may rewrite dictConfig mid-iteration
    for key in dictConfig.copy().keys():
        if key in self._input_mutator_targets:
            dictConfig, key = self._input_mutator(dictConfig, key)

        #- NB: meta keys can not be top level keys with this current pattern
        if key not in self.meta_keys:
            log.debug(f"parsing {key=}")
            node_factory = self._create_factory(dictConfig[key],
                                                self.default_node_factory)
            # a falsy factory means this key produces no node
            if node_factory:
                new_node_nsid = nsid.make_child_nsid(prefix, key)
                log.debug(f"{new_node_nsid=}")
                new_node = ns.add_exactly_one(new_node_nsid, node_factory)

                if isinstance(dictConfig[key], Mapping):
                    # recurse into the sub-mapping under the new node's nsid
                    self.parse(dictConfig=dictConfig[key], prefix=new_node_nsid)
                else:
                    # NOTE(review): this sets the attribute on the NEW node
                    # using the same key that named it — confirm it should
                    # not target the parent node instead
                    log.debug(
                        f"setting {new_node.nsid}.{key} to {dictConfig[key]}"
                    )
                    setattr(new_node, key, dictConfig[key])
    return ns