Example #1
0
 def __init__(self, logger, extra=None):
     """Initialize the adapter around *logger*.

     Args:
         logger: underlying ``logging.Logger`` to delegate to.
         extra: contextual mapping merged into each record; a fresh empty
             dict is used when ``None`` is passed (avoids a shared mutable
             default).
     """
     if extra is None:
         extra = {}
     try:
         super(KvLoggerAdapter, self).__init__(logger, extra)
     except TypeError:  # py26 incompatibility *sigh*
         # On Python 2.6 LoggerAdapter is an old-style class, so the
         # cooperative super() call raises TypeError; fall back to invoking
         # the base initializer directly.
         LoggerAdapter.__init__(self, logger, extra)
Example #2
0
def load_formatters(path=None, prefix='kaleidoscope.formatter'):
    """Discover and import formatter modules under *path*.

    Args:
        path: list of directories to walk; defaults to this file's directory.
        prefix: module-name prefix for discovered entries; a trailing ``'.'``
            is appended when missing.

    Returns:
        A list containing the package ``ModuleInfo`` entries followed by the
        successfully imported (non-package) module objects.
        NOTE(review): the list mixes ``ModuleInfo`` objects and module
        objects — callers appear to rely on this; confirm before changing.
    """
    log_name = '{}.load_formatters'.format(__name__)
    log = LoggerAdapter(logger, {'name_ext': log_name})

    log.debug("Loading formatters...")

    if path is None:
        path = [os.path.split(__file__)[0]]

    if prefix[-1] != '.':
        prefix += '.'

    log.debug("Walking packages. path: {} | prefix: {}".format(path, prefix))
    all_module_infos = list(pkgutil.walk_packages(path=path, prefix=prefix))
    log.debug("Package Walk generated {} ModInfos: {}".format(
        len(all_module_infos), all_module_infos))

    # Comprehensions instead of filter()/itertools.filterfalse(): clearer,
    # and both partitions are realized immediately rather than lazily.
    all_pkgs = [mi for mi in all_module_infos if mi.ispkg]
    all_modules = [mi for mi in all_module_infos if not mi.ispkg]

    successful_imports = list(all_pkgs)

    for modinfo in all_modules:
        try:
            new_mod = importlib.import_module(modinfo.name)
            successful_imports.append(new_mod)
        except ImportError as err:
            # Best-effort: a formatter that fails to import is logged and
            # skipped rather than aborting the whole discovery pass.
            log.warning("Failed to import formatter module: {}: {}".format(
                modinfo.name, err))

    return successful_imports
Example #3
0
    def __init__(self,
                 *,
                 timeout=None,
                 settle_time=None,
                 done=False,
                 success=False):
        """Initialize the status object.

        Keyword Args:
            timeout: seconds before the timeout thread fires; ``None`` (or a
                non-positive value) disables the timeout thread.
            settle_time: extra settle delay in seconds; ``None`` becomes 0.0.
            done: if truthy the status starts pre-completed and no timeout
                thread is started.
            success: initial success flag.
        """
        super().__init__()
        self._tname = None
        self._lock = threading.RLock()
        self._callbacks = deque()
        self._done = done
        self.success = success
        self.timeout = None

        # The adapter injects this status object into every log record.
        self.log = LoggerAdapter(logger=logger, extra={'status': self})

        if settle_time is None:
            settle_time = 0.0

        self.settle_time = float(settle_time)

        if timeout is not None:
            self.timeout = float(timeout)

        # NOTE(review): ``self.done`` is presumably a property over
        # ``self._done`` defined elsewhere in this class — confirm.
        if self.done:
            # in the case of a pre-completed status object,
            # don't handle timeout
            return

        if self.timeout is not None and self.timeout > 0.0:
            # Daemon thread so a pending timeout never blocks interpreter
            # shutdown; ``_wait_and_cleanup`` is defined elsewhere in the
            # class.
            thread = threading.Thread(target=self._wait_and_cleanup,
                                      daemon=True,
                                      name=self._tname)
            self._timeout_thread = thread
            self._timeout_thread.start()
Example #4
0
File: adapter.py Project: or/kaviar
 def __init__(self, logger, extra=None):
     """Initialize the adapter with *logger* and an optional *extra* mapping.

     Args:
         logger: underlying ``logging.Logger`` to delegate to.
         extra: contextual mapping merged into each record; defaults to a
             fresh empty dict when ``None`` (never a shared mutable default).
     """
     if extra is None:
         extra = {}
     try:
         super(KvLoggerAdapter, self).__init__(logger, extra)
     except TypeError:  # py26 incompatibility *sigh*
         # Python 2.6's LoggerAdapter is an old-style class; super() raises
         # TypeError there, so call the base initializer directly instead.
         LoggerAdapter.__init__(self, logger, extra)
Example #5
0
    def __init__(self, options, infos, params):
        """Init method.

        options: runtime options; ``nodaemon`` and ``logdir`` are read here.
        infos: nested status/check/group/object description for this job.
        params: plugin parameters; only ``debug`` is consulted here.
        """

        self.infos = infos

        # Preserve the previous check status before resetting status_infos
        # for this run.
        self.old_status = self.infos["status"]["check_status"]
        self.old_status_infos = self.infos["status"]["status_infos"]
        self.infos["status"]["status_infos"] = {}

        # One logger per (plugin, plugin_check) pair.
        logger_per_job = logging.getLogger(
            "spvd.jobs.%s.%s" % (self.infos["check"]["plugin"],
                                 self.infos["check"]["plugin_check"]))

        if options.nodaemon:
            # Foreground mode: handlers attach to the parent "spvd.jobs"
            # logger, which receives records propagated from per-job loggers.
            logger = logging.getLogger("spvd.jobs")
        else:
            logger = logger_per_job

        # critical section around logger.handlers
        global __handler_lock__
        with __handler_lock__:
            # Only the first job for a given logger attaches a handler,
            # avoiding duplicate log lines.
            if len(logger.handlers) == 0:
                if options.nodaemon:
                    log_handler = logging.StreamHandler(sys.stdout)
                else:
                    log_dir = options.logdir + "/" + self.infos["check"][
                        "plugin"]
                    if os.path.exists(log_dir) is False:
                        os.mkdir(log_dir)
                    log_file = "%s/%s.log" % (
                        log_dir,
                        self.infos["check"]["plugin_check"],
                    )
                    log_handler = logging.FileHandler(log_file)

                # Fields like %(statusid)s are supplied via the
                # LoggerAdapter's extra mapping built below.
                formatter_string = (
                    "%(asctime)s %(levelname)-8s %(statusid)5s " +
                    "%(plugin)s:%(check)s %(group)s %(object)s : %(message)s")
                log_handler.setFormatter(logging.Formatter(formatter_string))
                logger.addHandler(log_handler)

                if params.get("debug", False):
                    logger.setLevel(logging.DEBUG)
                else:
                    logger.setLevel(logging.INFO)

                logger.propagate = False

        # Jobs will always use logger_per_job here, even in nodaemon mode,
        # since "spvd.jobs" will trap all log messages in that case.
        self.log = LoggerAdapter(
            logger_per_job,
            {
                "plugin": self.infos["check"]["plugin"],
                "check": self.infos["check"]["plugin_check"],
                "statusid": "#" + str(self.infos["status"]["status_id"]),
                "group": self.infos["group"]["name"],
                "object": self.infos["object"]["address"],
            },
        )
Example #6
0
    def __init__(self,
                 *,
                 name=None,
                 attr_name='',
                 parent=None,
                 labels=None,
                 kind=None):
        """Initialize an ophyd object.

        Keyword Args:
            name: display name; must be a string (``None`` becomes ``''``).
            attr_name: attribute name of this object on its parent.
            parent: owning object, if any.
            labels: ophyd labels; defaults to an empty set.
            kind: ``Kind`` member; defaults to ``Kind.normal``.
        """
        if labels is None:
            labels = set()
        self._ophyd_labels_ = set(labels)
        if kind is None:
            kind = Kind.normal
        self.kind = kind

        super().__init__()

        # base name and ref to parent, these go with properties
        if name is None:
            name = ''
        self._attr_name = attr_name
        if not isinstance(name, str):
            raise ValueError("name must be a string.")
        self._name = name
        self._parent = parent

        # Collect every subscription event type declared on the class
        # (class attributes named SUB* or _SUB*).
        self.subscriptions = {
            getattr(self, k)
            for k in dir(type(self))
            if (k.startswith('SUB') or k.startswith('_SUB'))
        }

        # dictionary of wrapped callbacks
        self._callbacks = {k: {} for k in self.subscriptions}
        # this is to maintain api on clear_sub
        self._unwrapped_callbacks = {k: {} for k in self.subscriptions}
        # map cid -> back to which event it is in
        self._cid_to_event_mapping = dict()
        # cache of last inputs to _run_subs, the semi-private way
        # to trigger the callbacks for a given subscription to be run
        self._args_cache = {k: None for k in self.subscriptions}
        # count of subscriptions we have handed out, used to give unique ids
        self._cb_count = count()
        # Create logger name from parent or from module class
        if self.parent:
            base_log = self.parent.log.name
            # NOTE(review): str.lstrip strips a *character set*, not a
            # prefix — this can over-strip when the child name begins with
            # characters drawn from the parent name; confirm intent
            # (str.removeprefix would strip an exact prefix instead).
            name = self.name.lstrip(self.parent.name + '_')
        else:
            base_log = self.__class__.__module__
            name = self.name
        self.log = LoggerAdapter(logger, {
            'base_log': base_log,
            'ophyd_object_name': name
        })
        self.control_layer_log = LoggerAdapter(control_layer_logger,
                                               {'ophyd_object_name': name})

        # Log once for the very first OphydObject instance ever created.
        if not self.__any_instantiated:
            self.log.info("first instance of OphydObject: id=%s", id(self))
            OphydObject._mark_as_instantiated()
        self.__register_instance(self)
Example #7
0
    def __init__(self, source_object, spec, colors=None, prologue=None):
        """Build a model of *source_object* from an ObjectModelSpec.

        source_object: the object that is being modeled
        spec: the ObjectModelSpec object that is used to build this model
        colors: optional list of colors that override the spec's colors
        prologue: ColoredText type for things to place in output stream
            before the object rendering output
        """
        init_log = LoggerAdapter(logger, {'name_ext' : 'ObjectModel.__init__'})
        init_log.debug("Entering")

        self.source_object = source_object
        self.attribute_models = self.make_attribute_models_from_spec(spec)
        # Spec colors first; an explicit colors parameter wins below.
        self.set_colors(self.get_colors_from_spec(spec))
        if colors:
            init_log.debug("Overriding spec colors with parameter colors")
            self.set_colors(colors)

        self.delimiter = self.get_delimiter_from_spec(spec)
        self.delimiter_colors = self.get_delimiter_colors_from_spec(spec)

        #- stuff that displays prepended to the object display
        init_log.debug("setting prologue: '{}'".format(prologue))
        self.prologue = prologue
        init_log.debug("Exiting")
Example #8
0
    def _list_leaves(self, nsids=False, cur_nsid=None):
        """
        Description:
            return a list of all the leaf nodes
        Input:
            nsids:
                - True: return a list of pairs of (nsid, leaf_node)
                - False: return a list of leaf_node's
        """
        log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode._list_leaves'})
        log.debug("invoked on: {} | cur_nsid: {} ".format(
            self._nsid, cur_nsid))
        leaves = []

        for child_nsid, child in self._all(nsids=True):
            try:
                #- anything that implements _list_leaves is not itself a leaf
                subtree_leaves = child._list_leaves(nsids=nsids,
                                                    cur_nsid=child_nsid)
                log.debug("extending with {}".format(subtree_leaves))
                leaves.extend(subtree_leaves)

            except (TypeError, AttributeError):
                #- no recursion possible: this item is a leaf
                log.debug("leaf found: {}".format(child))
                leaves.append((child_nsid, child) if nsids else child)

        log.debug("generated leaves: {}".format(leaves))
        return leaves
Example #9
0
 def logReport(self, log: LoggerAdapter):
     """
     Create an INFO log message to log the Connection report using the keys in self.stats.
     :param log: Logger to use to log the report
     """
     key_fields = (f"{key}: %({key})s" for key in self.stats.keys())
     log.info(f"Connection report: {', '.join(key_fields)}", self.stats)
Example #10
0
    def get_params(cls, map, *args, **kwargs):
        """
        Description:
            get the raw parameters from a formatted params map
        Input:
            map:
                the params map, keyed by cls._param_dict_mark_key
            *args: ignored
            **kwargs: overlay parameters; the special key '_params' names a
                parameter set to layer between kwargs and the defaults
        Output: a 2-tuple of (method_name, params ChainMap)
        """

        log = LoggerAdapter(logger, {'name_ext': 'ParametizedCall.get_params'})
        log.debug("Entered")
        log.debug("kwargs: {}".format(str(kwargs)))

        params_marker = cls._param_dict_mark_key

        # Deep copy so popping 'method_name' below never mutates the
        # caller's map.
        params = copy.deepcopy(map[params_marker])
        log.debug("raw params: {}".format(params))
        method_name = params['defaults'].pop('method_name')
        param_chain = collections.ChainMap(kwargs, params['defaults'])
        # NOTE: ChainMap holds kwargs by reference, so this pop also removes
        # '_params' from the chain that was just built.
        param_set_name = kwargs.pop('_params', None)
        if param_set_name:
            # Layer the named parameter set between kwargs and defaults.
            param_chain.maps.insert(1, params[param_set_name])

        log.debug("Exiting")
        return (method_name, param_chain)
Example #11
0
def set_up_config(logger_adapter: logging.LoggerAdapter) -> Optional[dict]:
    """Load and return the configuration, exiting the process when a
    required environment variable is missing or malformed."""
    try:
        return get_config(logger_adapter)
    except KeyError as e:
        logger_adapter.error(
            f"Environment variable {e} is not set correctly...Exiting")
        sys.exit(1)
Example #12
0
 def __iter__(self):
     """
     Description:
         default iteration is an unfiltered shallow iteration
     """
     iter_log = LoggerAdapter(logger, {'name_ext': 'NamespaceNode.__iter__'})
     iter_log.debug("Default iterator invoked on {}".format(self._nsid))
     # Delegate to the shallow iterator; no deep traversal by default.
     return self._shallowiterator()
Example #13
0
    def remove(self, nsid: Union[str, Nsid]) -> NamespaceNodeBase:
        """Delegate removal of *nsid* to the wrapped namespace after
        prepending this view's prefix."""
        rm_log = LoggerAdapter(
            logger,
            {'name_ext': f"{self.__class__.__name__}.remove: {self.prefix=}"})
        real_nsid = self.prefix + nsid

        rm_log.debug(f"removing: {real_nsid=}")
        return self.ns.remove(real_nsid)
Example #14
0
 def __getitem__(self, key):
     """Return the provider registered for *key*.

     Raises:
         ProviderMapLookupError: if no provider is registered for *key*.
     """
     log = LoggerAdapter(logger, {'name_ext' : 'ProviderMap.__getitem__'})
     val = self.data.get(key, FAIL_CANARY)
     log.debug('data.get({}) returned: {}'.format(key, val))
     # Identity check: a stored value that merely compares equal to the
     # sentinel must not be mistaken for a lookup failure.
     if val is FAIL_CANARY:
         raise ProviderMapLookupError('No provider for {}'.format(key))
     else:
         return val
Example #15
0
    def add(self, nsid: Union[str, Nsid], *args,
            **kwargs) -> List[NamespaceNodeBase]:
        """Delegate node creation for *nsid* to the wrapped namespace after
        prepending this view's prefix; extra args pass straight through."""
        add_log = LoggerAdapter(
            logger,
            {'name_ext': f"{self.__class__.__name__}.add: {self.prefix=}"})
        real_nsid = self.prefix + nsid

        add_log.debug(f"adding {real_nsid=}")
        return self.ns.add(real_nsid, *args, **kwargs)
Example #16
0
 def _shallowiterator(self):
     """
     Description:
         shallow iteration over all items in _ns_items
     """
     shallow_log = LoggerAdapter(
         logger, {'name_ext': 'NamespaceNode._shallowiterator'})
     shallow_log.debug("Shallow iterator invoked on {}".format(self._nsid))
     # Plain iterator over direct children only; no recursion.
     return iter(self._all(nsids=False))
Example #17
0
def parse_robots_txt(
        records: Iterable[Iterable[Tuple[int, str, str]]],
        logger: LoggerAdapter) -> Mapping[str, Iterable[Tuple[bool, str]]]:
    """Parses C{robots.txt} records.

    @param records:
        Tokenized records as produced by L{scan_robots_txt}.
    @param logger:
        Problems found while parsing are logged here.
    @return:
        rules_map: C{{ user_agent: (allowed, url_prefix)* }}
        A mapping from user agent name (case-folded) to a sequence of
        allow/disallow rules, where C{allowed} is C{True} iff the user agent
        is allowed to visit URLs starting with C{url_prefix}.
    """
    rules_map: Dict[str, Iterable[Tuple[bool, str]]] = {}
    unknown_fields: Set[str] = set()
    for record in records:
        have_agent = False
        rules: List[Tuple[bool, str]] = []
        for lineno, field, value in record:
            if field == 'user-agent':
                if rules:
                    # Rules already collected: treat this as a missing
                    # record separator and start a fresh rule list.
                    logger.error(
                        'Line %d specifies user agent after rules; '
                        'assuming new record', lineno)
                    rules = []
                have_agent = True
                agent_name = value.casefold()
                if agent_name in rules_map:
                    logger.error(
                        'Line %d specifies user agent "%s", which was '
                        'already addressed in an earlier record; '
                        'ignoring new record', lineno, value)
                else:
                    # The (still mutable) rules list is shared with the map,
                    # so later appends are visible through rules_map.
                    rules_map[agent_name] = rules
            elif field not in ('allow', 'disallow'):
                # Unknown fields are allowed for extensions.
                if field not in unknown_fields:
                    unknown_fields.add(field)
                    logger.info('Unknown field "%s" (line %d)', field, lineno)
            elif not have_agent:
                logger.error(
                    'Line %d specifies %s rule without a preceding '
                    'user agent line; ignoring line', lineno, field)
            else:
                try:
                    path = unescape_path(value)
                except ValueError as ex:
                    logger.error('Bad escape in %s URL on line %d: %s',
                                 field, lineno, ex)
                else:
                    # Ignore allow/disallow directives without a path.
                    if path:
                        rules.append((field == 'allow', path))
    return rules_map
Example #18
0
    def __init__(self, nsid, alias_nsid, ns_items=None):
        """Create an alias node that maps *nsid* to *alias_nsid*.

        ns_items: optional items passed through to the base node.
        """
        log = LoggerAdapter(logger, {'name_ext': 'AliasNode.__init__'})
        log.debug("Initializing Alias Node: {} --> {}".format(nsid, alias_nsid))

        # Parentheses make the call span lines; no backslash needed.
        super().__init__(namespace_id=nsid,
                         provider_map=None,
                         ghost=None,
                         ns_items=ns_items)

        self._alias_nsid = alias_nsid
        self._ns_items = ns_items
Example #19
0
def shell_output_logger(console_output: str, prefix_item: str, logger: logging.LoggerAdapter, logging_level: int):
    """
    Logs a prefix line followed by the captured shell output (stdout/err).

    :param console_output: captured stdout/stderr text; skipped when empty
    :param prefix_item: line logged before the output itself
    :param logger: adapter used for emission
    :param logging_level: level (e.g. ``logging.INFO``) for both messages

    NOTE(review): an earlier docstring claimed ANSI control characters could
    be removed, but this implementation performs no such filtering.
    """
    logger.log(logging_level, prefix_item)

    if not console_output:
        # Nothing was captured; the prefix alone records that the command ran.
        return

    logger.log(logging_level, console_output)
Example #20
0
    def __init__(self,
                 *,
                 timeout=None,
                 settle_time=0,
                 done=None,
                 success=None):
        """Initialize the status object.

        Keyword Args:
            timeout: seconds before automatic failure; ``None`` disables it.
            settle_time: extra seconds to wait after completion
                (``None`` is treated as 0.0).
            done, success: deprecated pre-completion flags; prefer calling
                set_finished() / set_exception() after construction.

        Raises:
            ValueError: if success is truthy while done is falsy.
        """
        super().__init__()
        self._tname = None
        self._lock = threading.RLock()
        self._event = threading.Event()  # state associated with done-ness
        self._settled_event = threading.Event()
        # "Externally initiated" means set_finished() or set_exception(exc) was
        # called, as opposed to completion via an internal timeout.
        self._externally_initiated_completion_lock = threading.Lock()
        self._externally_initiated_completion = False
        self._callbacks = deque()
        self._exception = None

        # The adapter injects this status object into every log record.
        self.log = LoggerAdapter(logger=logger, extra={'status': self})

        if settle_time is None:
            settle_time = 0.0

        self._settle_time = float(settle_time)

        if timeout is not None:
            timeout = float(timeout)
        self._timeout = timeout

        # We cannot know that we are successful if we are not done.
        if success and not done:
            raise ValueError(
                "Cannot initialize with done=False but success=True.")
        if done is not None or success is not None:
            warn(
                "The 'done' and 'success' parameters will be removed in a "
                "future release. Use the methods set_finished() or "
                "set_exception(exc) to mark success or failure, respectively, "
                "after the Status has been instantiated.", DeprecationWarning)

        # All user callbacks run on a single daemon thread so they never
        # block the caller of set_finished()/set_exception().
        self._callback_thread = threading.Thread(target=self._run_callbacks,
                                                 daemon=True,
                                                 name=self._tname)
        self._callback_thread.start()

        # Honor the deprecated pre-completion flags after the callback
        # machinery is running.
        if done:
            if success:
                self.set_finished()
            else:
                exc = UnknownStatusFailure(
                    f"The status {self!r} has failed. To obtain more specific, "
                    "helpful errors in the future, update the Device to use "
                    "set_exception(...) instead of setting success=False "
                    "at __init__ time.")
                self.set_exception(exc)
Example #21
0
    def parse(self,
              dictConfig: dict,
              prefix: str = '') -> Union[Namespace, None]:
        """
        Description:
            parse a dictConfig into a Namespace object
        Input:
            dictConfig - the configuration file parsed into a dictionary
            prefix - the rolling prefix for this parse; accumulates across
                recursive calls
        Output:
            a namespace object representing the nodes specified in the
            dictConfig object, or None when dictConfig is not mapping-like

        NOTE(review): a previous version of this docstring documented
        ``namespace`` / ``namespace_factory`` parameters that do not exist
        in this signature.
        """
        log = LoggerAdapter(logger,
                            dict(name_ext=f'{self.__class__.__name__}.parse'))

        log.debug(f"enter: {prefix=} {dictConfig=}")
        ns = self.ns

        # Duck-type check: anything without .keys() cannot be parsed here.
        try:
            dictConfig.keys()
        except (AttributeError, TypeError):
            return None

        #- create namespace as dictConfig describes
        # Iterate over a copy since input mutators may rewrite dictConfig.
        for key in dictConfig.copy().keys():

            if key in self._input_mutator_targets:
                dictConfig, key = self._input_mutator(dictConfig, key)

            #- NB: meta keys can not be top level keys with this current pattern
            if key not in self.meta_keys:
                log.debug(f"parsing {key=}")
                node_factory = self._create_factory(dictConfig[key],
                                                    self.default_node_factory)

                if node_factory:
                    new_node_nsid = nsid.make_child_nsid(prefix, key)
                    log.debug(f"{new_node_nsid=}")
                    new_node = ns.add_exactly_one(new_node_nsid, node_factory)

                    if isinstance(dictConfig[key], Mapping):
                        # Recurse into nested mappings under the new node.
                        self.parse(dictConfig=dictConfig[key],
                                   prefix=new_node_nsid)
                    else:
                        # Scalar value: stored as an attribute on the node.
                        # NOTE(review): the attribute name reuses the same
                        # ``key`` as the node itself — confirm intended.
                        log.debug(
                            f"setting {new_node.nsid}.{key} to {dictConfig[key]}"
                        )
                        setattr(new_node, key, dictConfig[key])

        return ns
Example #22
0
    def __init__(self, options, infos, params):
        """ Init method.

        options: runtime options; ``nodaemon`` and ``logdir`` are read here.
        infos: nested status/check/group/object description for this job.
        params: plugin parameters; only ``debug`` is consulted here.
        """

        self.infos = infos

        # Preserve the previous check status before resetting status_infos
        # for this run.
        self.old_status = self.infos['status']['check_status']
        self.old_status_infos = self.infos['status']['status_infos']
        self.infos['status']['status_infos'] = {}

        # One logger per (plugin, plugin_check) pair.
        logger_per_job = logging.getLogger(
            "spvd.jobs.%s.%s" % (self.infos['check']['plugin'],
                                 self.infos['check']['plugin_check']))

        if options.nodaemon:
            # Foreground mode: handlers attach to the parent "spvd.jobs"
            # logger, which receives records propagated from per-job loggers.
            logger = logging.getLogger("spvd.jobs")
        else:
            logger = logger_per_job

        # critical section around logger.handlers
        global __handler_lock__
        with __handler_lock__:
            # Attach a handler only once per logger to avoid duplicate lines.
            if len(logger.handlers) == 0:
                if options.nodaemon:
                    log_handler = logging.StreamHandler(sys.stdout)
                else:
                    log_dir = options.logdir + '/' + self.infos['check'][
                        'plugin']
                    if os.path.exists(log_dir) is False:
                        os.mkdir(log_dir)
                    log_file = "%s/%s.log" % (
                        log_dir, self.infos['check']['plugin_check'])
                    log_handler = logging.FileHandler(log_file)

                # Fields like %(statusid)s are supplied via the
                # LoggerAdapter's extra mapping built below.
                formatter_string = '%(asctime)s %(levelname)-8s %(statusid)5s ' + \
                        '%(plugin)s:%(check)s %(group)s %(object)s : %(message)s'
                log_handler.setFormatter(logging.Formatter(formatter_string))
                logger.addHandler(log_handler)

                if params.get('debug', False):
                    logger.setLevel(logging.DEBUG)
                else:
                    logger.setLevel(logging.INFO)

                logger.propagate = False

        # Jobs will always use logger_per_job here, even in nodaemon mode,
        # since "spvd.jobs" will trap all log messages in that case.
        self.log = LoggerAdapter(
            logger_per_job, {
                'plugin': self.infos['check']['plugin'],
                'check': self.infos['check']['plugin_check'],
                'statusid': "#" + str(self.infos['status']['status_id']),
                'group': self.infos['group']['name'],
                'object': self.infos['object']['address']
            })
Example #23
0
    def __init__(self, colors=None, attributes=None, description=None,\
        delimiter=' | ', delimiter_colors=None):
        """
        Input:
            colors: list of kaleidoscope color names to use in styling the object's attributes
            attributes: list of names of attributes to render
                you can optionally use a list of 2 or 3-tuples where:
                    the first value is the attribute name
                    the second value is the length of the attribute rendering
                    the third value is the NSID of a formatter in the formatter namespace
                The AttributeSpec type exists as well to be passed in here
            description: the description of this object model spec
            delimiter: string to place between attributes on a single line
            delimiter_colors: colors of the delimiter string

        Notes:
            All parameters are optional, but you probably want to fill in the attributes before using this
        """
        log = LoggerAdapter(logger, {'name_ext': 'ObjectModelSpec.__init__'})
        msg = "Entered: colors: {} | attributes: {}".format(colors, attributes)
        msg += " | description: {} | delimiter: {}".format(
            description, delimiter)
        msg += " | delimiter_colors: {}".format(delimiter_colors)
        log.debug(msg)
        self.delimiter = str(delimiter)
        self.description = description

        # Cycle endlessly through the given colors, or yield None forever.
        if colors:
            _colors = list()
            for _color in colors:
                _colors.append(Color(_color))
            self.colors = cycle(_colors)
        else:
            self.colors = repeat(None)

        if delimiter_colors:
            _delimiter_colors = list()
            for _dcolor in delimiter_colors:
                _delimiter_colors.append(Color(_dcolor))
            self.delimiter_colors = cycle(_delimiter_colors)
        else:
            # NOTE(review): copy.copy of a cycle/repeat iterator relies on
            # itertools' pickle support, and the copy resumes from the source
            # iterator's current position — confirm this is intended.
            self.delimiter_colors = copy.copy(self.colors)

        #- locally store parsed attributes as AttributeSpec objects
        if attributes:
            # NOTE(review): strings are Iterable, so a bare string attribute
            # name will NOT be wrapped into a list here — confirm.
            if not isinstance(attributes, Iterable):
                attributes = [attributes]
            log.debug("parsing attributes: {}".format(attributes))
            self.attributes = self.parse_attributes(attributes)
        else:
            log.info("spec has no attributes specified.")
            self.attributes = None

        log.debug("initialized: {}".format(self))
async def putflag_test(task: PutflagCheckerTaskMessage, session_a: AsyncClient,
                       session_b: AsyncClient, db: ChainDB,
                       logger: LoggerAdapter) -> None:
    """Deposit the flag for this round.

    Registers a fresh user on each session, sends the flag from user A to
    user B as a message, and stores B's credentials in the checker DB —
    presumably for a later getflag retrieval (confirm against the matching
    handler).
    """
    await register_user(session_a)
    logger.debug("registered user for session_a")
    (username_b, password_b) = await register_user(session_b)
    logger.debug("registered user for session_b")

    # The flag itself is the message body delivered to user B.
    await send_message(session_a, username_b, task.flag)

    # Persist B's credentials under a key the retrieval side can read back.
    await db.set("credentials", (username_b, password_b))
Example #25
0
    def get(self, nsid: Union[str, Nsid]) -> NamespaceNodeBase:
        """Fetch the node at *nsid*, resolved against this view's prefix;
        the bare delineator refers to the prefix root itself."""
        get_log = LoggerAdapter(
            logger,
            {'name_ext': f"{self.__class__.__name__}.get: {self.prefix=}"})
        if nsid == self.delineator:
            # Asking for the delineator alone means "the view's root".
            real_nsid = self.prefix
        else:
            real_nsid = self.prefix + nsid

        get_log.debug(f"getting {real_nsid=}")
        return self.ns.get(real_nsid)
Example #26
0
    def __setitem__(self, key, value):
        """
        Description:
            set local dict [key] = value

        Input:
            key: key name
            value: value to set for key
        """
        set_log = LoggerAdapter(logger,
                                {'name_ext': 'NsidChainMap.__setitem__'})
        set_log.debug("setting local dict {} to {}".format(key, value))
        # Writes always land in the local dict, never in the chained maps.
        self.data[key] = value
Example #27
0
    def init_object_spec_ns(self, file=defaults.object_spec_file, root=None):
        """
        Parse the object spec config file into self.spec.object namespace

        file: path of the object-spec YAML file (project default when
            omitted)
        root: namespace node to graft the parsed roots onto; defaults to
            self.spec.object
        """

        log = LoggerAdapter(logger, {'name_ext': 'Render.init_object_spec_ns'})
        root = self.spec.object if root is None else root
        dictConfig = load_yaml_file(filename=file)
        parser = ObjectSpecConfigParser(nsroot=self.spec)
        ns_roots = parser.parse(dictConfig)
        log.debug(f"object spec ns roots: {ns_roots}")
        # Graft each parsed namespace root under the chosen root node.
        for ns_x in ns_roots:
            root._add_ns(ns_x)
Example #28
0
    def _parse_meta_factory_function_dynamic(
            self, dictConfig: dict) -> Union[callable, None]:
        """Build a dynamic type ("dyty") from a ``__type__`` config section,
        returning None when the section or any required key is absent."""
        #- dyty == "dynamic type"
        log = LoggerAdapter(
            logger,
            {'name_ext':
             f'{self.__class__.__name__}._parse_meta_factory_function_dynamic'
             })
        try:
            type_section = dictConfig["__type__"]
            dyty_name = type_section["name"]
            raw_bases = type_section["bases"]
            dyty_dict = type_section["dict"]

            # Resolve base-class references into actual classes.
            dyty_bases = self._parse_meta_factory_function_dynamic_bases(
                raw_bases)

            log.debug(f"{dyty_name=}")
            log.debug(f"{dyty_bases=}")
            log.debug(f"{dyty_dict=}")
            return type(dyty_name, dyty_bases, dyty_dict)

        except KeyError:
            # Any missing key means this config has no dynamic type.
            return None
Example #29
0
    def __getitem__(self, key):
        """
        Description:
            Go through self and the sequence of maps to find the first match for the given
            key

        Input:
            key: key for the item to get
        """
        log = LoggerAdapter(logger, {'name_ext': 'NsidChainMap.__getitem__'})
        value = self._LOOKUP_FAIL_CANARY
        try:
            return self.data[key]
        except KeyError:
            log.debug("{} not found in local dict".format(key))
            for m_nsid in self.map_nsids:
                try:
                    # 'mapping' rather than 'map': avoid shadowing the builtin.
                    mapping = self.nsroot._lookup(m_nsid)
                except NamespaceLookupError:
                    log.warning('Unable to lookup map: {}'.format(m_nsid))
                    # BUGFIX: without this `continue`, the lookup below would
                    # reuse the previous iteration's map — or raise NameError
                    # on the first iteration — after a failed namespace lookup.
                    continue

                try:
                    value = mapping[key]
                    break
                except KeyError:
                    log.debug('KeyError in {}. Trying next...'.format(m_nsid))
                    continue

            # Identity check against the sentinel: a value that merely
            # compares equal must not be mistaken for a lookup failure.
            if value is self._LOOKUP_FAIL_CANARY:
                # NOTE(review): __missing__ is expected to raise; its return
                # value (if any) is deliberately not returned, matching the
                # original contract.
                self.__missing__(key)
            else:
                return value
Example #30
0
    def render_object_from_specname(self, obj, specname, align=True):
        """
        Description:
            Render an object by looking up a spec object by name
        Input:
            obj: object to be rendered
            specname: NSID of the spec to look up in the spec.object namespace
            align: passed through to render_object_from_spec
        Notes:
            grabs the ObjectModelSpec and calls render_object_from_spec
        """
        log = LoggerAdapter(logger,
                            {'name_ext': 'Render.render_obj_from_specname'})
        log.debug("Entering")

        #- create a chain map from all the spec nodes in the namespace
        # Walk from the full NSID up toward the root, collecting each
        # ancestor's specmap so that child specs override parent specs.
        specmaps = list()
        nsid = specname
        while nsid:
            specmaps.append(self.spec.object._lookup(nsid).specmap)
            nsid = '.'.join(nsid.split('.')[0:-1])
        spec_chain = collections.ChainMap(*specmaps)
        log.debug(
            "Creating ObjectModelSpec from spec.object namespace with ChainMap"
        )
        log.debug("  ChainMap keys: {}".format(spec_chain.keys()))

        spec = ObjectModelSpec(**spec_chain)
        return self.render_object_from_spec(obj, spec, align=align)
Example #31
0
 def get_render_data(self):
     """
     Description:
         return self as a collection of ColoredText instances
     """
     log = LoggerAdapter(logger, {'name_ext': '{}.get_render_data'.format(
         self.__class__.__name__)})
     # Prologue, the colored attribute text, then epilogue — in that order.
     ard = AttributeRenderDatum(self.prologue,
                                ColoredText(self.text, self.color),
                                self.epilogue)
     log.debug("returning: {}".format(ard))
     return ard
    def process(self, msg, kwargs):
        """Augment each record's ``extra`` with a timestamp and caller info.

        NOTE(review): the base ``LoggerAdapter.process`` sets
        ``kwargs['extra']`` to the adapter's own ``extra`` dict, so the
        writes below mutate that shared dict on every call — confirm this
        is intended.
        """
        (msg, kwargs) = LoggerAdapter.process(self, msg, kwargs)
        # Stamp the record with the wall-clock time at processing.
        kwargs['extra']['timestamp'] = datetime.datetime.now()

        # Attribute the log line to its true caller by walking the stack.
        # ``__stack_depth`` is name-mangled, so it must be defined on this
        # class (not visible in this chunk).
        stack = inspect.stack()
        if len(stack) > self.__stack_depth:
            log_line = stack[self.__stack_depth]
            kwargs['extra']['caller'] = os.path.basename(log_line[1]) + ":" + str(log_line[2])

        return msg, kwargs
Example #33
0
    def __init__(self, options, infos, params):
        """ Init method.

        options: runtime options; ``nodaemon`` and ``logdir`` are read here.
        infos: nested status/check/group/object description for this job.
        params: plugin parameters; only ``debug`` is consulted here.
        """

        self.infos = infos

        # Preserve the previous check status before resetting status_infos.
        self.old_status = self.infos['status']['check_status']
        self.old_status_infos = self.infos['status']['status_infos']
        self.infos['status']['status_infos'] = {}

        # One logger per (plugin, plugin_check) pair.
        logger_per_job = logging.getLogger("spvd.jobs.%s.%s" % (self.infos['check']['plugin'],
                                                                self.infos['check']['plugin_check']))

        if options.nodaemon:
            # Foreground mode: handlers attach to the parent "spvd.jobs"
            # logger, which receives records propagated from per-job loggers.
            logger = logging.getLogger("spvd.jobs")
        else:
            logger = logger_per_job

        # critical section around logger.handlers
        global __handler_lock__
        with __handler_lock__:
            # Attach a handler only once per logger to avoid duplicate lines.
            if len(logger.handlers) == 0:
                if options.nodaemon:
                    log_handler = logging.StreamHandler(sys.stdout)
                else:
                    log_dir = options.logdir + '/' + self.infos['check']['plugin']
                    if os.path.exists(log_dir) is False:
                        os.mkdir(log_dir)
                    log_file = "%s/%s.log" % (log_dir, self.infos['check']['plugin_check'])
                    log_handler = logging.FileHandler(log_file)

                # Fields like %(statusid)s are supplied via the
                # LoggerAdapter's extra mapping built below.
                formatter_string = '%(asctime)s %(levelname)-8s %(statusid)5s ' + \
                        '%(plugin)s:%(check)s %(group)s %(object)s : %(message)s'
                log_handler.setFormatter(logging.Formatter(formatter_string))
                logger.addHandler(log_handler)

                if params.get('debug', False):
                    logger.setLevel(logging.DEBUG)
                else:
                    logger.setLevel(logging.INFO)

                logger.propagate = False

        # Jobs will always use logger_per_job here, even in nodaemon mode,
        # since "spvd.jobs" will trap all log messages in that case.
        self.log = LoggerAdapter(logger_per_job, {
            'plugin':   self.infos['check']['plugin'],
            'check':    self.infos['check']['plugin_check'],
            'statusid': "#" + str(self.infos['status']['status_id']),
            'group':    self.infos['group']['name'],
            'object':   self.infos['object']['address']})
Example #34
0
class BaseJob(object):
    """ Base class for job implementation in spvd. """

    class BaseError(Exception):
        """ Base class for BaseJob exceptions.

        Plugins raise this (or a subclass) for expected failures; run()
        catches it and turns it into an ERROR check status.

        Exception.__init__ already stores the message passed to the
        constructor, so no custom __init__ is needed.
        """

    # The only check statuses a plugin is allowed to report back;
    # enforced by set_check_status().
    _valid_status = ('FINISHED', 'WARNING', 'ERROR')

    def __init__(self, options, infos, params):
        """ Init method.

        @options: daemon options; uses options.nodaemon (log to stdout
                  through the shared "spvd.jobs" logger) and
                  options.logdir (root of per-plugin log directories).
        @infos:   job description; the nested 'check', 'status', 'group'
                  and 'object' dicts are read here.
        @params:  plugin parameters; params['debug'] switches the job
                  logger to DEBUG level (INFO otherwise).
        """

        self.infos = infos

        # Keep the previous run's status around, then reset status_infos
        # for this run of the job.
        self.old_status = self.infos['status']['check_status']
        self.old_status_infos = self.infos['status']['status_infos']
        self.infos['status']['status_infos'] = {}

        # One logger per (plugin, plugin_check) pair.
        logger_per_job = logging.getLogger("spvd.jobs.%s.%s" % (self.infos['check']['plugin'],
                                                                self.infos['check']['plugin_check']))

        # In nodaemon mode everything is funnelled through the parent
        # "spvd.jobs" logger instead of per-job log files.
        if options.nodaemon:
            logger = logging.getLogger("spvd.jobs")
        else:
            logger = logger_per_job

        # critical section around logger.handlers
        global __handler_lock__
        with __handler_lock__:
            # Only the first job targeting this logger installs a handler;
            # later jobs reuse it.
            if not logger.handlers:
                if options.nodaemon:
                    log_handler = logging.StreamHandler(sys.stdout)
                else:
                    log_dir = os.path.join(options.logdir, self.infos['check']['plugin'])
                    if not os.path.exists(log_dir):
                        os.mkdir(log_dir)
                    log_file = os.path.join(log_dir, '%s.log' % self.infos['check']['plugin_check'])
                    log_handler = logging.FileHandler(log_file)

                # The extra fields (statusid, plugin, check, group, object)
                # are supplied by the LoggerAdapter built below.
                formatter_string = '%(asctime)s %(levelname)-8s %(statusid)5s ' + \
                        '%(plugin)s:%(check)s %(group)s %(object)s : %(message)s'
                log_handler.setFormatter(logging.Formatter(formatter_string))
                logger.addHandler(log_handler)

                if params.get('debug', False):
                    logger.setLevel(logging.DEBUG)
                else:
                    logger.setLevel(logging.INFO)

                # Keep job records out of ancestor/root handlers.
                logger.propagate = False

        # Jobs will always use logger_per_job here, even in nodaemon mode,
        # since "spvd.jobs" will trap all log messages in that case.
        self.log = LoggerAdapter(logger_per_job, {
            'plugin':   self.infos['check']['plugin'],
            'check':    self.infos['check']['plugin_check'],
            'statusid': "#" + str(self.infos['status']['status_id']),
            'group':    self.infos['group']['name'],
            'object':   self.infos['object']['address']})

    def set_check_status(self, check_status, check_message, status_infos=None):
        """ Helper function to prepare check's status. """

        #self.log.warning('This module is using [set_check_status] which is deprecated.'
        #    ' Please upgrade it or fill a bug report if an update does not exist.')

        if check_status not in self._valid_status:
            message = 'Job returned an invalid status <%s>' % check_status
            self.log.error(message)
            raise BaseJob.BaseError(message)

        self.infos['status']['check_message'] = check_message
        self.infos['status']['check_status'] = check_status
        if status_infos:
            self.infos['status']['status_infos'].update(status_infos)

    def run(self):
        """ Starts the job implemented by this plugin. """

        status, message = '', ''
        try:
            tmp_ret = self.go()
            try:
                # Done in a separate try..except to avoid shadowing
                # TypeError exceptions from plugins
                status, message = tmp_ret
            except TypeError, error:
                # Transitional catch
                # self.log.warning('This module is not returning its status like it should.'
                #    ' This is a deprecated behavior.'
                #    ' Please upgrade it or fill a bug report if an update does not exist.')
                status = self.infos['status']['check_status']
                message = self.infos['status']['check_message']

        except (BaseJob.BaseError, BaseJobRuntimeError), error:
            # Expected exception, nothing to worry about
            self.log.error(str(error))
            status, message = 'ERROR', str(error)

        except Exception, error:
            # Unexpected exception, should log a traceback
            self.log.critical('Fatal error: job stopped')
            self.log.critical(traceback.format_exc())
            status, message = 'ERROR', str(error)
Example #35
0
 def __init__(self, logger, extra):
     """ Wrap *logger* with *extra* context, keeping the wrapped logger's name. """
     self.name = logger.name
     LoggerAdapter.__init__(self, logger, extra)