Example #1
    def __init__(self,
                 *,
                 name=None,
                 attr_name='',
                 parent=None,
                 labels=None,
                 kind=None):
        if labels is None:
            labels = set()
        self._ophyd_labels_ = set(labels)
        if kind is None:
            kind = Kind.normal
        self.kind = kind

        super().__init__()

        # base name and ref to parent, these go with properties
        if name is None:
            name = ''
        self._attr_name = attr_name
        if not isinstance(name, str):
            raise ValueError("name must be a string.")
        self._name = name
        self._parent = parent

        self.subscriptions = {
            getattr(self, k)
            for k in dir(type(self))
            if (k.startswith('SUB') or k.startswith('_SUB'))
        }

        # dictionary of wrapped callbacks
        self._callbacks = {k: {} for k in self.subscriptions}
        # this is to maintain api on clear_sub
        self._unwrapped_callbacks = {k: {} for k in self.subscriptions}
        # map cid -> back to which event it is in
        self._cid_to_event_mapping = dict()
        # cache of last inputs to _run_subs, the semi-private way
        # to trigger the callbacks for a given subscription to be run
        self._args_cache = {k: None for k in self.subscriptions}
        # count of subscriptions we have handed out, used to give unique ids
        self._cb_count = count()
        # Create logger name from parent or from module class
        if self.parent:
            base_log = self.parent.log.name
            name = self.name.lstrip(self.parent.name + '_')
        else:
            base_log = self.__class__.__module__
            name = self.name
        self.log = LoggerAdapter(logger, {
            'base_log': base_log,
            'ophyd_object_name': name
        })
        self.control_layer_log = LoggerAdapter(control_layer_logger,
                                               {'ophyd_object_name': name})

        if not self.__any_instantiated:
            self.log.info("first instance of OphydObject: id=%s", id(self))
            OphydObject._mark_as_instantiated()
        self.__register_instance(self)
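The adapter's extra dict only becomes visible when a handler's formatter references those fields. A minimal standalone sketch (not from the ophyd source; the logger name and object name below are made up) of how the `ophyd_object_name` field could surface in output:

import logging
from logging import LoggerAdapter

logger = logging.getLogger("ophyd.objects")  # hypothetical logger name
handler = logging.StreamHandler()
# the adapter injects its extra dict into every LogRecord, so the formatter can use the keys
handler.setFormatter(logging.Formatter("[%(ophyd_object_name)s] %(levelname)s %(message)s"))
logger.addHandler(handler)
logger.setLevel(logging.INFO)

log = LoggerAdapter(logger, {"base_log": "ophyd.objects", "ophyd_object_name": "motor1"})
log.info("first instance of OphydObject")
# -> [motor1] INFO first instance of OphydObject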
Example #2
    def __getattr__(self, attr):
        log = LoggerAdapter(logger,
                            {'name_ext': 'FilteredCollection.__getattr__'})
        log.debug("FilteredCollection.__getattr__('{}')".format(attr))
        eval_str = '.{}'.format(attr)
        log.debug("calling collective_eval({})".format(eval_str))
        return self.collective_eval(eval_str)
Example #3
def load_formatters(path=None, prefix='kaleidoscope.formatter'):
    log_name = '{}.load_formatters'.format(__name__)
    log = LoggerAdapter(logger, {'name_ext': log_name})

    log.debug("Loading formatters...")

    if path is None:
        path = [os.path.split(__file__)[0]]

    if prefix[-1] != '.':
        prefix += '.'

    log.debug("Walking packages. path: {} | prefix: {}".format(path, prefix))
    all_module_infos = list(pkgutil.walk_packages(path=path, prefix=prefix))
    log.debug("Package Walk generated {} ModInfos: {}".format(
        len(all_module_infos), all_module_infos))

    all_pkgs = filter(lambda x: x.ispkg, all_module_infos)
    all_modules = itertools.filterfalse(lambda x: x.ispkg, all_module_infos)

    successful_imports = list(all_pkgs)

    for modinfo in all_modules:
        try:
            new_mod = importlib.import_module(modinfo.name)
            successful_imports.append(new_mod)
        except ImportError as err:
            log.warning("Failed to import formatter module: {}: {}".format(
                modinfo.name, err))

    return successful_imports
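A hedged usage sketch: the returned list mixes pkgutil.ModuleInfo entries (for sub-packages, which are never imported) with imported module objects, so callers need to handle both shapes.

import pkgutil

# hypothetical caller; assumes the kaleidoscope.formatter package is importable
for item in load_formatters():
    # sub-packages come back as ModuleInfo entries, modules as imported module objects
    label = item.name if isinstance(item, pkgutil.ModuleInfo) else item.__name__
    print("loaded formatter:", label)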
Example #4
def initialize_request_logger(app, **kwargs):
    '''Creates an augmented logger that lasts for the lifetime of a flask request. Enables
    additional data to be logged during the request context including username, user id,
    the user's ip address and a unique request-specific ID that can be used to debug all the
    messages associated with a specific request by a user.
    '''

    # Generate a unique Request ID
    request_id = uuid4()
    username = ''
    user_id = -1
    ip_address = get_remote_ip_address()

    if current_user.is_authenticated:
        username = current_user.username
        user_id = current_user.id

    log_fields = {
        'username': username,
        'ip_address': ip_address,
        'request_id': str(request_id),
        'user_id': user_id,
    }

    request_logger = LoggerAdapter(LOG, log_fields)
    g.request_logger = request_logger
    g.request_id = request_id
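A minimal wiring sketch, assuming a standard Flask app with flask_login initialized and the helpers above importable; the hook and route names are illustrative, not from the original project:

from flask import Flask, g

app = Flask(__name__)

@app.before_request
def attach_request_logger():
    # runs inside the request context, so current_user and g are available
    initialize_request_logger(app)

@app.route("/ping")
def ping():
    # every message now carries username, user_id, ip_address and request_id
    g.request_logger.info("handling /ping")
    return "pong"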
Example #5
    def walk(self,
             start: Union[NamespaceNodeBase, None] = None,
             walk_dict: Union[Dict, None] = None) -> Union[Dict, object]:
        """
        Description:
            walk the namespace nodes
        Output:
            Dictionary representing the namespace's structure
        """
        log = LoggerAdapter(logger,
                            dict(name_ext=f"{self.__class__.__name__}.walk"))

        if start is None:
            start = self.root

        if walk_dict is None:
            walk_dict = dict()

        if not isinstance(start, NamespaceNodeBase):
            return start

        key = nsid_basename(start.nsid.nsid)
        walk_dict[key] = dict()

        for attr_name in dir(start):
            if not attr_name.startswith('_') and not attr_name == "nsid":
                attr = getattr(start, attr_name)
                updated_dict = self.walk(start=attr, walk_dict=walk_dict[key])

                if not isinstance(updated_dict, dict):
                    walk_dict[key][attr_name] = attr
                else:
                    walk_dict[key].update(updated_dict)

        return walk_dict
Example #6
    def __getitem__(self, key):
        """
        Description:
            Go through self and the sequence of maps to find the first match for the given
            key

        Input:
            key: key for the item to get
        """
        log = LoggerAdapter(logger, {'name_ext': 'NsidChainMap.__getitem__'})
        value = self._LOOKUP_FAIL_CANARY
        try:
            value = self.data[key]
            return value
        except KeyError:
            log.debug("{} not found in local dict".format(key))
            for m_nsid in self.map_nsids:
                try:
                    map = self.nsroot._lookup(m_nsid)
                except NamespaceLookupError:
                    log.warning('Unable to lookup map: {}'.format(m_nsid))
                    continue

                try:
                    value = map[key]
                    break
                except KeyError:
                    log.debug('KeyError in {}. Trying next...'.format(m_nsid))
                    continue

            if value == self._LOOKUP_FAIL_CANARY:
                return self.__missing__(key)
            else:
                return value
Example #7
def get_instance_logger(
    instance_name: str, host: str = "", port: int = 0, uid: str = ""
) -> LoggerAdapter:
    """
    Get an adapted logger instance for a given instance (driver/channel/transport)

    Args:
        instance_name: logger/instance name, i.e. "scrapli.driver"
        host: host to add to logging extras if applicable
        port: port to add to logging extras if applicable
        uid: unique id for a logging instance

    Returns:
        LoggerAdapter: adapter logger for the instance

    Raises:
        N/A

    """
    extras = {}

    if host and port:
        extras["host"] = host
        extras["port"] = str(port)

    if uid:
        extras["uid"] = uid

    _logger = getLogger(instance_name)
    return LoggerAdapter(_logger, extra=extras)
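A short usage sketch with made-up connection details; what actually gets emitted depends on how the "scrapli.driver" logger and its handlers/formatters are configured:

# hypothetical values for illustration
log = get_instance_logger("scrapli.driver", host="192.0.2.10", port=22, uid="session-1")
log.debug("opening connection")  # the record carries host/port/uid as extra fields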
Example #8
    def _lookup_symbolic_ref(self, ref, follow_symrefs=True):
        """
        Description:
            look up a value starting from NSROOT instead of from this namespace
            node. (If nsroot is not set, we look up from this node.)
        Input:
            ref: the symbolic reference to look up
            follow_symrefs: whether or not to follow links that lead to other links
        """
        log = LoggerAdapter(logger,
                            {'name_ext': 'NamespaceNode._lookup_symbolic_ref'})
        if self._nsroot is None:
            nsroot = self

        else:
            nsroot = self._nsroot

        log.debug("nsid ref: {}".format(ref))
        #- strip the prefix
        nsid = get_nsid_from_ref(ref)
        ref = nsroot._lookup(nsid)

        if follow_symrefs:
            while is_nsid_ref(ref):
                log.debug("nsid ref: {}".format(ref))
                nsid = get_nsid_from_ref(ref)
                ref = nsroot._lookup(nsid)
            #- ref no longer an nsid ref

        return ref
Example #9
    def authenticate(
        request: HttpRequest,
        username: str = None,
        password: str = None,
        code: str = None,
        token_provider: TokenProviderBase = TokenProvider(),
    ) -> Union[UserModel, None]:
        logger = LoggerAdapter(
            getLogger(__package__),
            get_extra(
                "backends.OAuthClientBackend",
                request.session["OAC_CLIENT_IP"],
                request.session["OAC_STATE_STR"],
            ),
        )
        try:
            token = token_provider.create(code)
        except NoUserError as e_info:
            logger.info(f"raised django_oac.exceptions.NoUserError: {e_info}")
            return None
        else:
            user = token.user

            logger.info(f"user '{user}' authenticated")
            return user
Example #10
    def render_object_from_specname(self, obj, specname, align=True):
        """
        Description:
            Render an object by looking up a spec object by name
        Input:
            obj: object to be rendered
            specname: name of the spec to look up and use to render the object
            align: whether to align attribute widths across objects
        Notes:
            grabs the ObjectModelSpec and calls render_object_from_spec
        """
        log = LoggerAdapter(logger,
                            {'name_ext': 'Render.render_obj_from_specname'})
        log.debug("Entering")

        #- create a chain map from all the spec nodes in the namespace
        specmaps = list()
        nsid = specname
        while nsid:
            specmaps.append(self.spec.object._lookup(nsid).specmap)
            nsid = '.'.join(nsid.split('.')[0:-1])
        spec_chain = collections.ChainMap(*specmaps)
        log.debug(
            "Creating ObjectModelSpec from spec.object namespace with ChainMap"
        )
        log.debug("  ChainMap keys: {}".format(spec_chain.keys()))

        spec = ObjectModelSpec(**spec_chain)
        return self.render_object_from_spec(obj, spec, align=align)
Example #11
    def get_params(cls, map, *args, **kwargs):
        """
        Description:
            get the raw parameters from a formatted params map
        Input:
            map:
                the params map
            *args: ignored
            **kwargs: overlay parameters
        Output: a 2-tuple of (method_name, params ChainMap)
        """

        log = LoggerAdapter(logger, {'name_ext': 'ParametizedCall.get_params'})
        log.debug("Entered")
        log.debug("kwargs: {}".format(str(kwargs)))

        params_marker = cls._param_dict_mark_key

        params = copy.deepcopy(map[params_marker])
        log.debug("raw params: {}".format(params))
        method_name = params['defaults'].pop('method_name')
        param_chain = collections.ChainMap(kwargs, params['defaults'])
        param_set_name = kwargs.pop('_params', None)
        if param_set_name:
            param_chain.maps.insert(1, params[param_set_name])

        log.debug("Exiting")
        return (method_name, param_chain)
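A sketch of a plausible params map and call, purely illustrative: the real value of `cls._param_dict_mark_key` and the exact map layout come from the surrounding project, and "__params__", "fetch", and the parameter names below are assumptions. It also assumes `get_params` is exposed as a classmethod, as its `cls` parameter suggests.

# assuming cls._param_dict_mark_key == "__params__" (hypothetical)
params_map = {
    "__params__": {
        "defaults": {"method_name": "fetch", "retries": 1, "timeout": 30},
        "fast": {"timeout": 5},
    }
}

method_name, params = ParametizedCall.get_params(params_map, _params="fast", retries=3)
# method_name == "fetch"
# params["retries"] == 3   (explicit kwarg wins)
# params["timeout"] == 5   (from the "fast" param set, ahead of the defaults)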
Example #12
    def __init__(self, options, infos, params):
        """Init method."""

        self.infos = infos

        self.old_status = self.infos["status"]["check_status"]
        self.old_status_infos = self.infos["status"]["status_infos"]
        self.infos["status"]["status_infos"] = {}

        logger_per_job = logging.getLogger(
            "spvd.jobs.%s.%s" % (self.infos["check"]["plugin"],
                                 self.infos["check"]["plugin_check"]))

        if options.nodaemon:
            logger = logging.getLogger("spvd.jobs")
        else:
            logger = logger_per_job

        # critical section around logger.handlers
        global __handler_lock__
        with __handler_lock__:
            if len(logger.handlers) == 0:
                if options.nodaemon:
                    log_handler = logging.StreamHandler(sys.stdout)
                else:
                    log_dir = options.logdir + "/" + self.infos["check"][
                        "plugin"]
                    if os.path.exists(log_dir) is False:
                        os.mkdir(log_dir)
                    log_file = "%s/%s.log" % (
                        log_dir,
                        self.infos["check"]["plugin_check"],
                    )
                    log_handler = logging.FileHandler(log_file)

                formatter_string = (
                    "%(asctime)s %(levelname)-8s %(statusid)5s " +
                    "%(plugin)s:%(check)s %(group)s %(object)s : %(message)s")
                log_handler.setFormatter(logging.Formatter(formatter_string))
                logger.addHandler(log_handler)

                if params.get("debug", False):
                    logger.setLevel(logging.DEBUG)
                else:
                    logger.setLevel(logging.INFO)

                logger.propagate = False

        # Jobs will always use logger_per_job here, even in nodaemon mode,
        # since "spvd.jobs" will trap all log messages in that case.
        self.log = LoggerAdapter(
            logger_per_job,
            {
                "plugin": self.infos["check"]["plugin"],
                "check": self.infos["check"]["plugin_check"],
                "statusid": "#" + str(self.infos["status"]["status_id"]),
                "group": self.infos["group"]["name"],
                "object": self.infos["object"]["address"],
            },
        )
Example #13
def authenticate_view(request: HttpRequest) -> HttpResponse:
    state_str = uuid4().hex
    client_ip, _ = get_client_ip(request)

    if request.session.get("OAC_STATE_STR") != "test":
        request.session["OAC_STATE_STR"] = state_str
        request.session["OAC_STATE_TIMESTAMP"] = timezone.now().timestamp()
        request.session["OAC_CLIENT_IP"] = client_ip or "unknown"

    logger = LoggerAdapter(
        getLogger(__package__),
        get_extra(
            "views.authenticate_view",
            request.session["OAC_CLIENT_IP"],
            request.session["OAC_STATE_STR"],
        ),
    )
    logger.info("authentication request")

    try:
        ret = redirect(f"{oac_settings.AUTHORIZE_URI}"
                       f"?scope=openid"
                       f"&client_id={oac_settings.CLIENT_ID}"
                       f"&redirect_uri={oac_settings.REDIRECT_URI}"
                       f"&state={state_str}"
                       "&response_type=code")
    except ConfigurationError as err:
        logger.error(str(err))
        ret = render(
            request,
            TEMPLATES_DIR / "500.html",
            {"message": "App config is incomplete, cannot continue."},
            status=500,
        )
    return ret
Example #14
    def render_view(self, render_prologue=True):
        """Returns a GroupView"""
        log = LoggerAdapter(logger, {'name_ext': 'GroupModel.render_view'})
        log.debug("Entering")

        #- get number of digits in the number of object models
        max_line_num_strlen = len(str(len(self.object_models)))
        log.debug("max_line_num_strlen: {}".format(max_line_num_strlen))
        next_color = None

        #- keep track of all the lengths of the attributes
        #- across objects (so we can line them all up when we render a view)
        attr_maxlens = dict()

        for n, object_model_x in enumerate(self.object_models):
            #- override object model colors with group colors, if set
            next_color = self.get_next_color()
            if next_color:
                log.debug(
                    "setting object model color to next_color: {}".format(
                        next_color))
                object_model_x.set_colors([next_color], match_delimiter=True)

            if render_prologue:
                #- add the group sequence index as leading output of the object view
                index_str = '{current_index: <{max_line_num_strlen}}: '.format(
                    current_index=str(n), max_line_num_strlen=max_line_num_strlen)

                log.debug("index_str: '{}'".format(index_str))
                #- match prologue color to object color
                prologue = ColoredText(index_str, next_color)
                object_model_x.prologue = prologue

            if self.align:
                #- code to line up the attribute lengths across objects
                for attribute_model_x in object_model_x.attribute_models:
                    attr_name = attribute_model_x.name
                    attr_width = attribute_model_x.get_width()
                    log.debug(
                        f"attr_name:'{attr_name}' | attr_width: {attr_width}")
                    try:
                        attr_maxlens[attr_name] = max(attr_maxlens[attr_name],
                                                      attr_width)

                    except KeyError:
                        attr_maxlens[attr_name] = attr_width

        object_views = list()
        for object_model_x in self.object_models:
            if self.align:
                #- dynamically align attribute lengths
                #- TODO: if each attribute has a defined length, skip this
                for attribute_model_x in object_model_x.attribute_models:
                    attribute_model_x.length = attr_maxlens[
                        attribute_model_x.name]
            object_views.append(object_model_x.render_view())

        groupView = GroupView(object_views=object_views)
        log.debug("Returning groupView: {}".format(groupView))
        return groupView
Example #15
    def __init__(self, source_object, spec, colors=None, prologue=None):
        """source_object: the object that is being modeled
        spec: the ObjectModelSpec object that is used to build this model
        colors: list of colors to use for attributes
        prologue: ColoredText type for things to place in output stream before the object
            rendering output
        """
        log = LoggerAdapter(logger, {'name_ext' : 'ObjectModel.__init__'})
        log.debug("Entering")

        self.source_object = source_object
        self.attribute_models = self.make_attribute_models_from_spec(spec)
        _colors = self.get_colors_from_spec(spec)
        self.set_colors(_colors)
        if colors:
            log.debug("Overriding spec colors with parameter colors")
            self.set_colors(colors)

        self.delimiter = self.get_delimiter_from_spec(spec)
        self.delimiter_colors = self.get_delimiter_colors_from_spec(spec)

        #- stuff that displays prepended to the object display
        log.debug("setting prologue: '{}'".format(prologue))
        self.prologue = prologue
        log.debug("Exiting")
Example #16
    def _list_leaves(self, nsids=False, cur_nsid=None):
        """
        Description:
            return a list of all the leaf nodes
        Input:
            nsids:
                - True: return a list of pairs of (nsid, leaf_node)
                - False: return a list of leaf_node's
        """
        log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode._list_leaves'})
        log.debug("invoked on: {} | cur_nsid: {} ".format(
            self._nsid, cur_nsid))
        leaves = list()

        for nsid, ns_item in self._all(nsids=True):
            try:
                #- if an object has this method, we don't consider it a leaf itself
                next_leaves = ns_item._list_leaves(nsids=nsids, cur_nsid=nsid)
                log.debug("extending with {}".format(next_leaves))
                leaves += next_leaves

            except (TypeError, AttributeError):
                log.debug("leaf found: {}".format(ns_item))
                if nsids:
                    leaf = (nsid, ns_item)
                else:
                    leaf = ns_item
                leaves.append(leaf)

        log.debug("generated leaves: {}".format(leaves))
        return leaves
Example #17
    def __init__(self,
                 collection_specs=None,
                 group_specs=None,
                 object_specs=None):
        """
        Input:
            collection_specs: mapping to lookup collection model specs (unimplemented)
            group_specs: mapping to lookup group model specifications (unimplemented)
            object_specs: name to object spec mapping to look up the object model specifications
        """
        log = LoggerAdapter(logger, {'name_ext': 'Render.__init__'})
        log.debug("Entering")
        self._object_spec_map = object_specs

        #- set up Namespace for specs
        self.nsroot = NamespaceNode('.', is_nsroot=True)
        self.spec = self.nsroot._add_child('spec')
        self.spec._add_child('object')

        self._collection_spec_map = collection_specs
        self._group_spec_map = group_specs

        object_spec_config = load_yaml_file(filename=defaults.object_spec_file)
        object_specs_parser = ObjectSpecConfigParser(nsroot=self.nsroot)
        self.object_spec_map = object_specs_parser.parse(object_spec_config)
        log.debug('list(self.spec._all(nsids=True)):{}'.format(
            list(self.spec._all(nsids=True))))
        self.init_object_spec_ns()
Example #18
    def get(self, nsid: Union[str, Nsid]) -> NamespaceNodeBase:
        """
        Description:
            return a node object specified by NSID
        """
        log = LoggerAdapter(logger,
                            dict(name_ext=f"{self.__class__.__name__}.get"))
        self._validate_namespace_nsid_head(nsid)
        _nsid_ = Nsid(nsid)
        current_node = self.root
        nsid_segments = list_nsid_segments(nsid)[1:]  #- skip the initial root segment

        n = 0
        while current_node.nsid != _nsid_:
            log.debug(f"target {_nsid_=} != {current_node.nsid=}")
            try:
                nsid_segment = nsid_segments[n]
            except IndexError as err:
                raise NamespaceInternalError(
                    f"while looking for nsid \"{_nsid_}\", ran out of nsid_segments: {nsid_segments} at index {n}"
                ) from err
            try:
                current_node = getattr(current_node, nsid_segment)
                if not isinstance(current_node, NamespaceNodeBase):
                    warn(
                        "Rogue node type detected in the namespace. Will most likely cause errors."
                    )
            except AttributeError:
                raise NamespaceLookupError(
                    f"{current_node} has no attribute named '{nsid_segment}'")
            n += 1
        log.debug(f"found {_nsid_=} == {current_node.nsid=}")
        return current_node
Example #19
    def _parse_meta_factory_function_dynamic(
            self, dictConfig: dict) -> Union[callable, None]:
        #- dyty == "dynamic type"
        log = LoggerAdapter(
            logger,
            dict(
                name_ext=
                f'{self.__class__.__name__}._parse_meta_factory_function_dynamic'
            ))
        try:
            dyty_name = dictConfig["__type__"]["name"]
            dyty_bases = dictConfig["__type__"]["bases"]
            dyty_dict = dictConfig["__type__"]["dict"]

            dyty_bases = self._parse_meta_factory_function_dynamic_bases(
                dyty_bases)

            log.debug(f"{dyty_name=}")
            log.debug(f"{dyty_bases=}")
            log.debug(f"{dyty_dict=}")
            dyty = type(dyty_name, dyty_bases, dyty_dict)
            return dyty

        except KeyError:
            return None
Example #20
    def parse(self, dictConfig=None):
        '''
        Input:
            dictConfig: the dictConfig that initializes the namespace
        '''
        self._ns_roots = list()

        log = LoggerAdapter(logger,
                            {'name_ext': 'NamespaceConfigParser.parse'})
        log.debug("Beginning dictConfig parsing...")
        ns_collection = list()
        for key in dictConfig.keys():
            if self.prefix:
                nsid = '.'.join([self.prefix, key])
            else:
                nsid = key
            log.debug('creating new Namespace Root: {}'.format(nsid))
            cur_ns = self.new_node(nsid)
            log.debug('appending {} to ns_collection'.format(cur_ns))
            ns_collection.append(cur_ns)
            log.debug('Calling parse_submap')
            self.parse_submap(dictConfig[key], cur_ns)

        log.debug('returning {}'.format(ns_collection))
        return ns_collection
Example #21
    def __init__(self,
                 *,
                 timeout=None,
                 settle_time=None,
                 done=False,
                 success=False):
        super().__init__()
        self._tname = None
        self._lock = threading.RLock()
        self._callbacks = deque()
        self._done = done
        self.success = success
        self.timeout = None

        self.log = LoggerAdapter(logger=logger, extra={'status': self})

        if settle_time is None:
            settle_time = 0.0

        self.settle_time = float(settle_time)

        if timeout is not None:
            self.timeout = float(timeout)

        if self.done:
            # in the case of a pre-completed status object,
            # don't handle timeout
            return

        if self.timeout is not None and self.timeout > 0.0:
            thread = threading.Thread(target=self._wait_and_cleanup,
                                      daemon=True,
                                      name=self._tname)
            self._timeout_thread = thread
            self._timeout_thread.start()
Example #22
    def build_formatter_callable(self):
        """
        Description:
            Encapsulate some logic needed to create the callable used as the Attribute
            Formatter callable.
        """
        log = LoggerAdapter(
            logger, {'name_ext': 'AttributeModel.build_formatter_callable'})
        log.debug("entering: {}".format(self))

        if self.uses_named_render_method():
            try:
                log.debug(
                    'Getting reference to formatter callable from formatter name'
                )
                #- TODO: instantiate / get references to named arguments
                render_method = functools.partial(
                    eval(self.render_method_name), source=self.source_object)
            except NameError:
                log.error(
                    "render_method_name: '{}' seems invalid. Skipping render_method"
                    .format(self.render_method_name))
                render_method = None
        else:
            log.debug(
                'Getting reference to formatter callable from explicit callable'
            )
            render_method = self.render_method

        log.debug("exiting")
        return render_method
Example #23
    def _parse_meta_factory_function_dynamic_bases(self,
                                                   base_names: list) -> tuple:
        """
        Description:
            takes the list of strings of class names and turns it into a tuple of type objects
            required before passing the bases to `type` builtin
        Input:
            bases: a list of strings of base class names
        Output:
            tuple of types created from the names

        TODO: this was straight copied from _parse_meta_factory_function_static. Refactor into a shared method call
            that can capture the similar logic for importing the module and getting the symbol as an object
        """
        log = LoggerAdapter(
            logger,
            dict(
                name_ext=
                f'{self.__class__.__name__}._parse_meta_factory_function_dynamic_bases'
            ))

        bases = list()  # will be returned value

        for basename in base_names:
            module = None  #- the python module that has the class
            try:
                module_name = '.'.join(basename.split('.')[0:-1])
                symbol_name = basename.split('.')[-1]
                module = import_module(module_name)

            except ValueError:
                #- the import_module call failed
                #- we have a name, but it might not have a dot at all,
                #- which would then try to import the empty string and
                #- fail with a ValueError

                #- try to use thewired as the base import lib name
                log.debug(
                    f"value error importing: \"{module_name}\". Defaulting to 'thewired'."
                )
                module = import_module("thewired")

            finally:
                if module:
                    try:
                        cls = getattr(module, symbol_name)

                    except AttributeError as err:
                        log.debug(
                            f"specified class ({symbol_name}) does not exist in specified module ({module_name})!"
                        )
                        raise ValueError(
                            f"\"{symbol_name}\" does not exist in {module_name}!"
                        ) from err

                    else:
                        bases.append(cls)

        return tuple(bases)
Example #24
    def collective_eval(self, eval_string, formatter=None):
        '''
        Description:
            A more general version of calling a single method on each provider object.
            The problem with calling one method at a time is that you often want to use
            the provider collection as a single unit directly, instead of doing
            everything step by step.
            Essentially this runs:
                eval('x{}'.format(eval_string)) for each x in the filtered collection
        Input:
            eval_string: string to pass into eval; it is appended to each object in the
                filtered collection before evaluation
            formatter: a callable applied to the value returned by each object's
                evaluation, allowing the return value to be altered/formatted before it
                is collected
        Output:
            results of the evaluations, in a collection created by this instance's
                collection factory
        '''

        log = LoggerAdapter(logger,
                            {'name_ext': 'FilteredCollection.collective_eval'})
        log.debug('enter: eval_string: {} | formatter:{}'.format(
            eval_string, formatter))
        results = self._collection_factory()

        for c in self._filtered_collection:
            try:
                result = eval('c{}'.format(eval_string))
                log.debug('result: {}'.format(result))
                if callable(formatter):
                    result = formatter(result)
            except AttributeError:
                log.debug('eval "{}": AttributeError'.format(eval_string))
                result = FAIL_CANARY_ATTRIBUTE
            except TypeError:
                log.debug('eval "{}": TypeError'.format(eval_string))
                result = FAIL_CANARY_TYPE
            except NameError:
                log.debug('eval "{}": NameError'.format(eval_string))
                result = FAIL_CANARY_NAME

            #- keep a single flat collection
            if isinstance(result, collections.abc.Sequence):
                results.extend(result)
            else:
                results.append(result)

        if results:
            for result_x in results:
                if result_x not in self._fail_canaries:
                    log.debug('Non fail canary found: {}'.format(result_x))
                    break
            else:
                #- all are fails
                log.debug('All values are fail canaries: {}'.format(results))
                raise CollectiveEvalDelegateObjectError(results)

        return results
Example #25
    def __getitem__(self, key):
        log = LoggerAdapter(logger, {'name_ext': 'ProviderMap.__getitem__'})
        val = self.data.get(key, FAIL_CANARY)
        log.debug('data.get({}) returned: {}'.format(key, val))
        if val == FAIL_CANARY:
            raise ProviderMapLookupError('No provider for {}'.format(key))
        else:
            return val
Example #26
    def remove(self, nsid: Union[str, Nsid]) -> NamespaceNodeBase:
        log = LoggerAdapter(
            logger,
            dict(name_ext=f"{self.__class__.__name__}.remove: {self.prefix=}"))
        real_nsid = self.prefix + nsid

        log.debug(f"removing: {real_nsid=}")
        return self.ns.remove(real_nsid)
Example #27
    def __iter__(self):
        """
        Description:
            default iteration is an unfiltered shallow iteration
        """
        log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode.__iter__'})
        log.debug("Default iterator invoked on {}".format(self._nsid))
        return self._shallowiterator()
Example #28
    def __init__(self, nsid, alias_nsid, ns_items=None):
        log = LoggerAdapter(logger, {'name_ext': 'AliasNode.__init__'})
        log.debug("Initializing Alias Node: {} --> {}".format(nsid, alias_nsid))

        super().__init__(namespace_id=nsid, provider_map=None, ghost=None,
                         ns_items=ns_items)

        self._alias_nsid = alias_nsid
        self._ns_items = ns_items
Example #29
def get_logger(investigation_id: str):
    logger = getLogger(__name__)
    logger.setLevel(INFO)
    logger = LoggerAdapter(logger, {
        "referrer": CLAIRE,
        "investigation_id": investigation_id,
    })

    return logger.info
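Usage sketch: note that the function returns the adapter's bound info method rather than the adapter itself, so the caller gets a single log-at-INFO callable.

log_info = get_logger("investigation-42")   # hypothetical investigation id
log_info("starting timeline reconstruction")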
Example #30
def make_log_adapter(logger, cls, function_name):
    """
    Description:
        simple helper that wraps the pieces needed to build the logging adapter
        extra dictionary in the form this project's logging configuration expects
    """
    class_name = cls.__name__ if cls is not None else ''
    addendum = dict(name_ext='.'.join([class_name, function_name]))
    return LoggerAdapter(logger, addendum)
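A short usage sketch with a hypothetical class and call site; the resulting name_ext value matches the '%(name_ext)s' convention the other examples rely on:

import logging

logger = logging.getLogger(__name__)

class Render:
    def render_view(self):
        # hypothetical call site; name_ext becomes "Render.render_view"
        log = make_log_adapter(logger, self.__class__, "render_view")
        log.debug("entering")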