Example #1
    def _migrate_ring_to_config(self, model):
        if 'configuration-data' not in model['2.0']:
            model['2.0']['configuration-data'] = []

        #
        # Copying just the first ring-specification - there should only be
        # one if we are running this migrator
        #
        ring_copy = deepcopy(model['2.0']['ring-specifications'][0])
        ring_data = {
            'control_plane_rings':
            DataTransformer(ring_copy).all_output('-', '_')
        }
        cp_name = model['2.0']['control-planes'][0]['name']
        config_name = 'SWIFT-CONFIG-%s' % cp_name.upper()
        swift_ring = {
            'name': config_name,
            'services': ['swift'],
            'data': ring_data
        }

        model['2.0']['configuration-data'].append(swift_ring)
        if 'configuration-data' not in model['2.0']['control-planes'][0]:
            model['2.0']['control-planes'][0]['configuration-data'] = []
        model['2.0']['control-planes'][0]['configuration-data'].append(
            config_name)

        for config in self._config_files:
            if config['version'] == 2:
                config.pop('ring-specifications')
        model['2.0'].pop('ring-specifications')
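
A minimal before/after sketch of the model this migrator manipulates; the field values below are illustrative and assume a DataTransformer that rewrites '-' to '_' in keys, as the call above suggests.

# Hypothetical input model (structure inferred from the migrator above):
model = {
    '2.0': {
        'control-planes': [{'name': 'cp1'}],
        'ring-specifications': [{'name': 'account', 'ring-name': 'account'}],
    }
}
# After _migrate_ring_to_config(model) one would expect roughly:
#   model['2.0']['configuration-data'] == [{
#       'name': 'SWIFT-CONFIG-CP1',
#       'services': ['swift'],
#       'data': {'control_plane_rings': ...},   # keys rewritten from '-' to '_'
#   }]
#   model['2.0']['control-planes'][0]['configuration-data'] == ['SWIFT-CONFIG-CP1']
# and 'ring-specifications' is dropped from the model and from any version-2
# config files.
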
Example #2
def main(killed_event=None):
    p = argparse.ArgumentParser(
        description='Run a Manhattan worker with a TimeRotatingLog.')

    p.add_argument('-v',
                   '--verbose',
                   dest='verbose',
                   action='store_true',
                   default=False,
                   help='Print detailed output')
    p.add_argument('-p',
                   '--path',
                   dest='input_log_path',
                   type=str,
                   help='Input Manhattan log path')
    p.add_argument('--log',
                   dest='error_log_path',
                   type=str,
                   help='Path to error/debug log')
    p.add_argument('-u', '--url', dest='url', type=str, help='SQL backend URL')
    p.add_argument('-c',
                   '--complex',
                   dest='complex',
                   action='append',
                   help='Configure complex goal, like '
                   'name|include a, include b|exclude a')
    p.add_argument('--bind',
                   type=str,
                   default='tcp://127.0.0.1:5555',
                   help='ZeroMQ socket description to bind to')

    p.add_argument('--config',
                   type=str,
                   help='Python namespace to use for configuration')

    args = p.parse_args()

    if args.config:
        config = load_python_config(args.config)
    else:
        config = load_args_config(args)

    logging.config.dictConfig(
        logging_config(config.pop('verbose'), config.pop('error_log_path')))

    input_log_path = config.pop('input_log_path')
    bind = config.pop('bind')
    backend = Backend(**config)
    manhattan.server_backend = backend

    mhlog = TimeRotatingLog(input_log_path)
    worker = Worker(mhlog, backend, stats_every=5000)

    server = Server(backend, bind=bind)
    server.start()

    try:
        worker.run(stay_alive=True, killed_event=killed_event)
    finally:
        server.kill()
Example #3
    def configure(self, config):
        factory = unlock.UnlockFactory()
        level = 0
        max_level = 0
        while level <= max_level:
            pop_keys = set()
            for key, value in config.items():
                if "singleton" not in value:
                    continue

                level_value = value["singleton"]
                if level_value > max_level:
                    max_level = level_value

                if level_value <= level:
                    # print(key, value)
                    assert "name" in value
                    factory.create_singleton(value["name"], key, config)
                    pop_keys.add(key)

            for key in pop_keys:
                config.pop(key)
            level += 1

        unlock_instance = None
        for key, value in config.items():
            if "main" in value:
                unlock_instance = factory.create(key, config)
                break
        assert unlock_instance is not None
        return unlock_instance
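
A hedged sketch of the kind of config mapping this resolver walks; the keys and class names are illustrative, not taken from the Unlock project.

config = {
    'signal': {'singleton': 0, 'name': 'SignalSource'},
    'decoder': {'singleton': 1, 'name': 'Decoder'},
    'app': {'main': True},
}
# Entries tagged 'singleton' are created level by level (all level-0 entries,
# then level-1, ...) and popped from the config; the remaining entry tagged
# 'main' is then built via factory.create() and returned.
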
Example #4
    def enable_logger(name, handlers=None):
        """enable a specific logger by name
        this can be used to modify the logging behavior if the default (root)
        logger's structure is not adequate

        Args:
            name (str): logger's name
            handlers (str/logging.Handler/list): handlers to be used (can be name(s)
                or on instance of logging.Handler)
        """
        if isinstance(handlers, str):
            handlers = [handlers]
        # load default config
        with open(util.get_config('logging'), 'r') as file_:
            config = json.load(file_)
        config.pop('root', None)
        # setup config
        loggers = {
            name: {
                "handlers": handlers,
                "propagate": False
            }
        }
        config['loggers'] = loggers
        logging.config.dictConfig(config)
        # fetch empty handlers
        if not handlers:
            logger = logging.getLogger(name)
            logger.handlers = []
            logger.addHandler(logging.NullHandler())
Example #5
def _get_config(path_or_dict=None):
    """ Loads a yaml/json file, or python dict """

    if path_or_dict is None:
        path_or_dict = os.environ.get('LOG_CONFIG', None)

    config = {}
    if isinstance(path_or_dict, dict):
        config = path_or_dict

    elif path_or_dict is not None and os.path.exists(path_or_dict):
        with open(path_or_dict) as f:
            data = f.read()

            # Can load both yaml and json files
            config = yaml.safe_load(data)

    elif path_or_dict is not None:
        raise ValueError('Config could not be loaded: {}'.format(path_or_dict))
    else:
        LOG.info('Using default logging config')

    # always need a default formatter, handler and logger
    config.setdefault('formatters', {})
    config.setdefault('handlers', {})
    config.setdefault('loggers', {})

    default_log_format = config.pop('default_log_format', DEFAULT_LOG_FORMAT)
    default_log_level = config.pop('default_log_level', 'DEBUG')

    default_formatter = {
        'format': default_log_format,
        'class': 'colored_formatter.ColoredFormatter',
        'style': '{',
    }

    default_handler = {
        'level': 'DEBUG',
        'class': 'logging.StreamHandler',
        'formatter': 'default',
    }

    default_handlers = [
        handler for handler in config['handlers'].keys()
        if config['handlers'][handler] is not None
    ]

    default_logger = {'level': default_log_level, 'handlers': default_handlers}

    config['formatters'].setdefault('default', default_formatter)

    # logging to console can be disabled by setting the console handler to None
    config['handlers'].setdefault('console', default_handler)

    # set the global root logger config
    config['loggers'].setdefault('', default_logger)

    return config
Example #6
    def config_file(path):
        def load_yaml(yml_path):
            try:
                with open(yml_path) as yml_file:
                    return yaml.safe_load(yml_file)
            except IOError as exc:
                raise argparse.ArgumentTypeError(
                    'Failed to open {}: {}'.format(yml_path, exc))
            except yaml.YAMLError as exc:
                raise argparse.ArgumentTypeError(
                    'Failed to parse YAML from {}: {}'.format(yml_path, exc))

        config = load_yaml(path)
        try:
            config['credentials']
            config['repository']
            config['tasks_file']
            config['logging']
        except KeyError as exc:
            raise argparse.ArgumentTypeError(
                'Missing required section {} in configuration.'.format(exc))

        try:
            whitelist_file = config.pop('whitelist_file')
        except KeyError:
            logger.warning('No whitelist file supplied. Manual approval will '
                           'be needed for all PRs.')
            config['whitelist'] = []
        else:
            config['whitelist'] = load_yaml(whitelist_file)

        return config
Example #7
    def __init__(self, probe, options=None, **kwargs):
        self._probe = probe
        self._closed = True
        self._inited = False

        # Update options.
        self._options = options or {}
        self._options.update(kwargs)

        # Bail early if we weren't provided a probe.
        if probe is None:
            self._board = None
            return

        # Apply common configuration settings from the config file.
        config = self._get_config()
        probesConfig = config.pop('probes', None)
        self._options.update(config)

        # Pick up any config file options for this board.
        if probesConfig is not None:
            for uid, settings in probesConfig.items():
                if uid.lower() in probe.unique_id.lower():
                    log.info("Using config settings for board %s" %
                             (probe.unique_id))
                    self._options.update(settings)

        # Ask the probe if it has an associated board, and if not then we create a generic one.
        self._board = probe.create_associated_board(self) \
                        or Board(self, self._options.get('target_override', None))
Example #8
def _get_target_type(config: Any,
                     kwargs: Any) -> Union[type, Callable[..., Any]]:
    kwargs_target = None
    config_target = None
    if "_target_" in kwargs:
        kwargs_target = kwargs.pop("_target_")

    if "_target_" in config:
        config_target = config.pop("_target_")

    target = None
    if kwargs_target is not None:
        target = kwargs_target
    elif config_target is not None:
        target = config_target

    if target is None:
        raise InstantiationException("Unable to determine target")

    if isinstance(target, str):
        return _locate(target)
    elif isinstance(target, type):
        return target
    elif callable(target):
        return target  # type: ignore
    else:
        raise InstantiationException(
            f"Unsupported target type : {type(target)}")
Example #9
def main(killed_event=None):
    p = argparse.ArgumentParser(
        description='Run a Manhattan worker with a TimeRotatingLog.')

    p.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                   default=False, help='Print detailed output')
    p.add_argument('-p', '--path', dest='input_log_path', type=str,
                   help='Input Manhattan log path')
    p.add_argument('--log', dest='error_log_path', type=str,
                   help='Path to error/debug log')
    p.add_argument('-u', '--url', dest='url', type=str,
                   help='SQL backend URL')
    p.add_argument('-c', '--complex', dest='complex', action='append',
                   help='Configure complex goal, like '
                   'name|include a, include b|exclude a')
    p.add_argument('--bind', type=str,
                   default='tcp://127.0.0.1:5555',
                   help='ZeroMQ socket description to bind to')

    p.add_argument('--config', type=str,
                   help='Python namespace to use for configuration')

    args = p.parse_args()

    if args.config:
        config = load_python_config(args.config)
    else:
        config = load_args_config(args)

    logging.config.dictConfig(logging_config(config.pop('verbose'),
                                             config.pop('error_log_path')))

    input_log_path = config.pop('input_log_path')
    bind = config.pop('bind')
    backend = Backend(**config)
    manhattan.server_backend = backend

    mhlog = TimeRotatingLog(input_log_path)
    worker = Worker(mhlog, backend, stats_every=5000)

    server = Server(backend, bind=bind)
    server.start()

    try:
        worker.run(stay_alive=True, killed_event=killed_event)
    finally:
        server.kill()
Example #10
def register_store_handler(parser, store_manager, config):
    """
    Registers a new store handler type with a StoreHandlerManager.
    This function extracts and validates information required for
    registration from the configuration dictionary.

    :param parser: An argument parser
    :type parser: argparse.ArgumentParser
    :param store_manager: A store manager
    :type store_manager: commissaire.store.storehandlermanager.
                         StoreHandlerManager
    :param config: A configuration dictionary
    :type config: dict
    """
    # Import the handler class.
    try:
        module_name = config.pop('name')
    except KeyError:
        parser.error(
            'Store handler configuration missing "name" key: '
            '{}'.format(config))
    try:
        module = importlib.import_module(module_name)
        handler_type = getattr(module, 'StoreHandler')
    except ImportError:
        parser.error(
            'Invalid store handler module name: {}'.format(module_name))

    # Import the model classes.
    module = importlib.import_module('commissaire.handlers.models')
    available = {k: v for k, v in module.__dict__.items() if
                 isinstance(v, type) and issubclass(v, module.Model)}
    model_types = set()
    for pattern in config.pop('models', ['*']):
        matches = fnmatch.filter(available.keys(), pattern)
        if not matches:
            parser.error('No match for model: {}'.format(pattern))
        model_types.update([available[name] for name in matches])

    try:
        store_manager.register_store_handler(
            handler_type, config, *model_types)
    except ConfigurationError as error:
        parser.error(
            'Configuration error for store handler "{0}": '
            '{1}'.format(module_name, error.message))
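
A hedged sketch of a store handler configuration dict; the module path, model globs, and URL are illustrative placeholders.

config = {
    'name': 'commissaire.store.etcdstorehandler',  # module expected to expose StoreHandler
    'models': ['Cluster*', 'Host'],                # fnmatch patterns over model class names
    'server_url': 'http://127.0.0.1:2379',
}
# register_store_handler(parser, store_manager, config)
# 'name' and 'models' are popped during registration; whatever remains
# (here 'server_url') is passed to the handler as its own configuration.
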
Example #11
def _getcls(field: str) -> str:
    if pop:
        classname = config.pop(field)
    else:
        classname = config[field]
    if not isinstance(classname, str):
        raise InstantiationException(f"_target_ field '{field}' must be a string")
    return classname
Example #12
def configure_logging(config):
    config = config.copy()
    if config.pop('http_debug', False):
        http.client.HTTPConnection.debuglevel = 1
    else:
        http.client.HTTPConnection.debuglevel = 0

    if config.get('coloredlogs'):
        conf = config.pop('coloredlogs').copy()
        conf['field_styles'] = dict_merge(coloredlogs.DEFAULT_FIELD_STYLES,
                                          conf.get('field_styles', {}))
        conf['level_styles'] = dict_merge(coloredlogs.DEFAULT_LEVEL_STYLES,
                                          conf.pop('level_styles', {}))
        coloredlogs.install(**conf)
    else:
        config.pop('coloredlogs', None)  # in case 'coloredlogs' is null, {}, or missing

    config.setdefault('version', 1)
    logging.config.dictConfig(config)
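
A hedged usage sketch, assuming coloredlogs and dict_merge are available as in the snippet above: a standard dictConfig mapping plus the two extra keys that configure_logging() strips off before delegating.

configure_logging({
    'http_debug': False,                           # toggles http.client debug output
    'coloredlogs': {'level': 'INFO'},              # forwarded to coloredlogs.install()
    'loggers': {'urllib3': {'level': 'WARNING'}},  # passed through to dictConfig
})
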
Example #13
def _get_cls_name(config: Any, pop: bool = True) -> str:
    if "_target_" not in config:
        raise InstantiationException("Input config does not have a `_target_` field")

    if pop:
        classname = config.pop("_target_")
    else:
        classname = config["_target_"]
    if not isinstance(classname, str):
        raise InstantiationException("_target_ field type must be a string")
    return classname
Example #14
def run():
    config_path = os.path.abspath(sys.argv[1])
    with open(config_path, 'r') as config_file:
        config = yaml.safe_load(config_file)
    logging.config.dictConfig(config.pop('logging', constants.DEFAULT_LOGGING))
    try:
        app = App(config)
        app.run()
    except:
        logging.exception('Exception: ')
        raise
Example #16
def sms(conf):
    '''Set up SMS service'''
    for name, config in conf.items():
        _key = cache_key('sms', config)
        if _key in _cache:
            info.sms[name] = _cache[_key]
            continue
        notifier_type = config.pop('type')
        if notifier_type not in sms_notifiers:
            raise ValueError('sms: %s: Unknown type: %s' %
                             (name, notifier_type))
        info.sms[name] = _cache[_key] = sms_notifiers[notifier_type](**config)
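
A hedged sketch of the conf mapping this expects; the 'twilio' type and its keyword arguments are illustrative and must match an entry in sms_notifiers.

sms({
    'alerts': {
        'type': 'twilio',              # must be a key in sms_notifiers
        'account_sid': 'ACXXXXXXXX',   # remaining keys become constructor kwargs
        'auth_token': 'dummy-token',
    },
})
# info.sms['alerts'] then holds the constructed notifier; instances are cached
# by a key derived from the config so identical configs are reused.
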
Example #17
    def __init__(self,
                 config_path,
                 log_file_name=None,
                 n_jobs=1,
                 disable_logging=True):
        self.n_jobs = n_jobs

        self.disable_logging = disable_logging
        if disable_logging:
            return

        with open(config_path, 'r') as f:
            config = json.load(f)

        self._log_queue = None
        self._logging_config = config

        logging.config.dictConfig(config)

        logger = logging.getLogger("genens")

        if log_file_name is not None:
            logger.addHandler(logging.FileHandler(log_file_name))

        for handler in logger.handlers:
            print(handler.level)

        # special setup for multiprocessing
        if self.n_jobs != 1:
            mp_manager = Manager()
            self._log_queue = mp_manager.Queue()

            config.pop("handlers", None)
            config["loggers"]["genens"].pop("handlers", None)

            self._logging_config = config

            handler = QueueHandler(self._log_queue)
            logger = logging.getLogger("genens")
            logger.addHandler(handler)
Example #18
def main():
    args = parse_args()
    daemon_command = args.daemon_command
    config = load_config_file(args.config_file)
    logging_config = config.pop('logging', None)
    daemon_config = config.pop('daemon')
    seneschal_config = config.pop('seneschal')
    try:
        if daemon_command == 'start':
            start(logging_config, daemon_config, seneschal_config)
        else:
            config_logging(logging_config)
            if daemon_command == 'stop':
                stop(daemon_config)
            else:
                engine = Engine(seneschal_config)
                if daemon_command == 'sweep':
                    engine.sweep()
    except Exception as e:
        emit_message(e)
        sys.exit(1)
    finally:
        logging.shutdown()
Example #19
    def __init__(self, output, config):
        """

        :param output: An output class which implements a send(msg) function.
        :param config: A config dict containing 'request' and 'scraper' keys and a list of URLs.
        e.g.
        {'scraper': {'frequency': 60},
         'request': {'timeout': 10},

         # This timeout overrides the above
         'https://www.google.com': {'request': {'timeout': 5}},
         'https://news.ycombinator.com': {'pattern': 'Hacker N.*'},
        }

        frequency: The number of seconds to sleep between scrapes.
        request: a dict containing any kwargs options passed to requests.get
        pattern: an optional regex to search for.

        """
        self.output = output
        self.request_options = config.pop('request', None)
        self.scraper_options = config.pop('scraper', None)
        self.scrapes = config
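
A hedged usage sketch based on the docstring; Scraper stands in for the class this __init__ belongs to, and ConsoleOutput is a minimal output implementing send(msg).

class ConsoleOutput:
    def send(self, msg):
        print(msg)

scraper = Scraper(ConsoleOutput(), {
    'scraper': {'frequency': 60},
    'request': {'timeout': 10},
    'https://www.google.com': {'request': {'timeout': 5}},
    'https://news.ycombinator.com': {'pattern': 'Hacker N.*'},
})
# scraper.request_options == {'timeout': 10}
# scraper.scraper_options == {'frequency': 60}
# scraper.scrapes is left holding only the per-URL entries.
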
Example #20
    def _load_from_file(filename):
        """Internal recursive file loader."""
        print "config -- reading {}".format(filename)
        if not os.path.exists(filename):
            raise Exception('{0} does not exist'.format(filename))
        with open(filename, 'r') as f:
            config = yaml.safe_load(f)

        extends = config.pop('extends', None)
        if extends:
            extends = os.path.abspath(
                os.path.join(os.path.dirname(filename), extends))
            config = dict_merge(config,
                                Configuration._load_from_file(extends))

        return config
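
A hedged sketch of the on-disk layout this loader supports; file names and keys are illustrative, and how conflicting keys resolve depends on dict_merge(), which is not shown.

# base.yml:
#     logging:
#         level: INFO
#     database:
#         host: localhost
#
# service.yml:
#     extends: base.yml
#     logging:
#         level: DEBUG
#
# Configuration._load_from_file('service.yml') reads service.yml, pops
# 'extends', recursively loads base.yml (resolved relative to service.yml),
# and merges the two dicts into a single configuration.
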
Example #21
def merge_dict_config(*dict_configs):
    """ Merge two or more logging dict configs, in a somewhat sane manner.

    This currently does a minimal job of merging. The only actual merged
    configuration is:

    disable_existing_loggers
        If set in *any* of the configs, this will also be set in the merged
        config

    root.handlers
        Handlers from all dict_configs will be applied to the root logger.

    All other config values from latter config_dicts will overwrite the value
    from previous config_dicts.

    Note: any dict objects passed in *will* be mutated.
    """
    merged_config = {}

    # merge 'version'
    versions = set(c.pop('version') for c in dict_configs)
    if len(versions) > 1:
        raise NotImplementedError("merge multiple config versions: {0}".format(
            ', '.join(versions)))
    merged_config['version'] = versions.pop()

    # merge 'disable_existing_loggers'
    merged_config['disable_existing_loggers'] = any(
        c.pop('disable_existing_loggers', False) for c in dict_configs)

    for config in dict_configs:
        # merge root handlers
        merged_h = merged_config.get('root', {}).get('handlers', [])
        if 'root' in config:
            merged_config['root'] = config.pop('root')
            for h in reversed(merged_h):
                merged_config['root'].setdefault('handlers', []).insert(0, h)

        # merge filters, loggers, handlers, formatters
        for k in config:
            merged_config.setdefault(k, {}).update(config[k])

    return merged_config
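
A hedged usage sketch: two dictConfig fragments merged as the docstring describes (note that both inputs are mutated).

base = {
    'version': 1,
    'handlers': {'console': {'class': 'logging.StreamHandler'}},
    'root': {'level': 'INFO', 'handlers': ['console']},
}
extra = {
    'version': 1,
    'handlers': {'file': {'class': 'logging.FileHandler', 'filename': 'app.log'}},
    'root': {'level': 'DEBUG', 'handlers': ['file']},
}
merged = merge_dict_config(base, extra)
# merged['root']['handlers'] contains both 'console' and 'file', while scalar
# settings such as the root 'level' come from the later config ('DEBUG').
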
Example #22
def load(filename='config.yml') -> dict:
    try:
        with open(filename, encoding='utf-8') as f:
            config: dict = yaml.safe_load(f)

        logconf: dict = config.pop('logging', None)
        if logconf:
            if logconf.get('level'):
                logging.basicConfig(**logconf)
            else:
                logconf.setdefault('version', 1)
                logconf.setdefault('disable_existing_loggers', False)
                logging.config.dictConfig(logconf)

        return config

    except Exception:
        logging.getLogger(__name__).exception("Error loading yaml config")

        return {}
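
A hedged sketch of a config.yml this loader would accept; the contents are illustrative. The 'logging' section is consumed by dictConfig (or basicConfig when it carries a 'level' key) and everything else is returned to the caller.

# config.yml:
#     logging:
#         version: 1
#         root:
#             level: INFO
#     database:
#         url: sqlite:///app.db
#
# config = load('config.yml')
# -> configures logging from the 'logging' section and returns
#    {'database': {'url': 'sqlite:///app.db'}}
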
Example #23
        e, tb = sys.exc_info()[1:]
        v = ValueError('Cannot resolve %r: %s' % (s, e))
        v.__cause__, v.__traceback__ = e, tb
        raise v


formatters = {}
for f in LOGGING['formatters'].keys():
    formatters[f] = logging.Formatter(LOGGING['formatters'][f]['format'])

handlers = {}
for h in LOGGING['handlers'].keys():
    config = {}
    for k, v in LOGGING['handlers'][h].items():
        config[k] = v
    factory = resolve(config.pop('class'))
    formatter = config.pop('formatter')
    if formatter:
        config['format'] = formatters[LOGGING['handlers'][h]['formatter']]
    config.pop('level')
    config.pop('format')
    if 'stream' in config:
        stream = config.pop('stream')
        config['strm'] = resolve(stream.split('/')[-1])
    handlers[h] = factory(**config)
    handlers[h].setLevel(logging._levelNames[LOGGING['handlers'][h]['level']])
    handlers[h].setFormatter(formatters[LOGGING['handlers'][h]['formatter']])
for l in LOGGING['loggers'].keys():
    logger = logging.getLogger(l)
    for h in LOGGING['loggers'][l]['handlers']:
        logger.addHandler(handlers[h])
Example #24
def init_logging(config=None, prefix="gelutils"):
    """Initialize logging.

    Set up the standard logging system based on values provided by config
    (a dict or an argparse namespace), namely:
    - loglevel
    - logtofile
    - logging_config_dict / logging_config_dict_file

    """

    if config is None:
        config = {}
    elif not isinstance(config, dict):
        # Assume it is a NameSpace object returned by argparse.ArgumentParser.parse_args()
        config = config.__dict__

    dictconfig_fn = config.get("logging_config_dict_file")
    dictconfig = config.get("logging_config_dict")
    if dictconfig_fn or dictconfig:
        if dictconfig_fn:
            fnbase, fnext = os.path.splitext(dictconfig_fn)
            if fnext.lower() == ".yaml":
                print(
                    "Configuring logging system using dict config from yaml-formatted file:",
                    dictconfig_fn)
                with open(dictconfig_fn) as fp:
                    dictconfig = yaml.safe_load(fp)
            else:
                print(
                    "Configuring logging system using dict config from json-formatted file:",
                    dictconfig_fn)
                with open(dictconfig_fn) as fp:
                    dictconfig = json.load(fp)
        else:
            print(
                "Configuring logging system using dict from logging_control settings file."
            )
        logging.config.dictConfig(dictconfig)
        logger.info("Logging system initialized using dict-config " + (
            "from file %s." %
            dictconfig_fn if dictconfig_fn else "from config-provided dict."))

    else:
        loglevel = get_loglevel_as_integer(config.pop('loglevel', None))
        logtofile = config.pop('logtofile', None)
        if logtofile:
            logtofile = os.path.expanduser(logtofile)

        # Examples of different log formats:
        # logfmt = "%(levelname)s: %(filename)s:%(lineno)s %(funcName)s() > %(message)s"
        # logfmt = "%(levelname)s %(name)s:%(lineno)s %(funcName)s() > %(message)s"
        # loguserfmt = format of log displayed to the user; logfilefmt = format of log messages written to logfile.
        logconsolefmt = "%(asctime)s %(levelname)-15s %(name)20s:%(lineno)-4s%(funcName)20s() %(message)s"
        logfilefmt = '%(asctime)s %(levelname)-6s - %(name)s:%(lineno)s - %(funcName)s() - %(message)s'
        logdatefmt = "%Y%m%d-%H:%M:%S"  # "%Y%m%d-%Hh%Mm%Ss"
        logtimefmt = "%H:%M:%S"  # Output to user in console
        logformat = config.pop('logformat',
                               logfilefmt if logtofile else logconsolefmt)
        logdatefmt = config.pop('logdatefmt',
                                logdatefmt if logtofile else logtimefmt)
        logging.basicConfig(level=loglevel,
                            format=logformat,
                            datefmt=logdatefmt,
                            filename=logtofile)
        logger.info(
            "Logging system initialized with loglevel %s, logfile filename=%s",
            loglevel, logtofile)
Example #25
    def configure_handler(self, config):
        """
        Configure a handler from a dictionary.
        """

        config_copy = dict(config)  # for restoring in case of error
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except Exception as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            factory = c
        else:
            cname = config.pop('class')
            klass = self.resolve(cname)
            #Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                'target' in config:
                try:
                    th = self.config['handlers'][config['target']]
                    if not isinstance(th, logging.Handler):
                        config.update(config_copy)  # restore for deferred cfg
                        raise TypeError('target not configured yet')
                    config['target'] = th
                except Exception as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass

        #Enable position arguments
        args = []
        if "args" in config:
            args = config["args"]
            del config["args"]

        props = config.pop('.', None)
        kwargs = dict([(k, config[k]) for k in config
                       if logging.config.valid_ident(k)])
        try:
            result = factory(*args,
                             **kwargs)  #initialize with positional arguments
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            #The argument name changed from strm to stream
            #Retry with old name.
            #This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(logging._checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result
Example #26
    def __init__(self,
                 probe: Optional["DebugProbe"],
                 auto_open: bool = True,
                 options: Optional[Mapping[str, Any]] = None,
                 option_defaults: Optional[Mapping[str, Any]] = None,
                 **kwargs) -> None:
        """@brief Session constructor.

        Creates a new session using the provided debug probe. Session options are merged from the
        _options_ parameter and any keyword arguments. Normally a board instance is created that can
        either be a generic board or a board associated with the debug probe.

        Note that the 'project_dir' and 'config' options must be set in either keyword arguments or
        the _options_ parameter.

        Passing in a _probe_ that is None is allowed. This is useful to create a session that operates
        only as a container for session options. In this case, the board instance is not created, so the
        #board attribute will be None. Such a Session cannot be opened.

        @param self
        @param probe The @ref pyocd.probe.debug_probe. "DebugProbe" instance. May be None.
        @param auto_open Whether to automatically open the session when used as a context manager.
        @param options Optional session options dictionary.
        @param option_defaults Optional dictionary of session option values. This dictionary has the
            lowest priority in determining final session option values, and is intended to set new
            defaults for option if they are not set through any other method.
        @param kwargs Session options passed as keyword arguments.
        """
        super().__init__()

        Session._current_session = weakref.ref(self)

        self._probe = probe
        self._closed: bool = True
        self._inited: bool = False
        self._user_script_namespace: Dict[str, Any] = {}
        self._user_script_proxy: Optional[UserScriptDelegateProxy] = None
        self._user_script_print_proxy = PrintProxy()
        self._delegate: Optional[Any] = None
        self._auto_open = auto_open
        self._options = OptionsManager()
        self._gdbservers: Dict[int, "GDBServer"] = {}
        self._probeserver: Optional["DebugProbeServer"] = None

        # Set this session on the probe, if we were given a probe.
        if probe is not None:
            probe.session = self

        # Update options.
        self._options.add_front(kwargs)
        self._options.add_back(options)

        # Init project directory.
        if self.options.get('project_dir') is None:
            self._project_dir: str = os.environ.get(
                'PYOCD_PROJECT_DIR') or os.getcwd()
        else:
            self._project_dir: str = os.path.abspath(
                os.path.expanduser(self.options.get('project_dir')))
        LOG.debug("Project directory: %s", self.project_dir)

        # Switch the working dir to the project dir.
        os.chdir(self.project_dir)

        # Load options from the config file.
        config = self._get_config()
        probes_config = config.pop('probes', None)

        # Pick up any config file options for this probe. These have priority over global options.
        if (probe is not None) and (probes_config is not None):
            did_match_probe = False
            for uid, settings in probes_config.items():
                if str(uid).lower() in probe.unique_id.lower():
                    if did_match_probe:
                        LOG.warning(
                            "Multiple probe config options match probe ID %s",
                            probe.unique_id)
                        break
                    LOG.info("Using config options for probe %s" %
                             (probe.unique_id))
                    self._options.add_back(settings)
                    did_match_probe = True

        # Add global config options.
        self._options.add_back(config)

        # Merge in lowest priority options.
        self._options.add_back(option_defaults)

        # Logging config.
        self._configure_logging()

        # Bail early if we weren't provided a probe.
        if probe is None:
            self._board = None
            return

        # Load the user script.
        self._load_user_script()

        # Ask the probe if it has an associated board, and if not then we create a generic one.
        self._board = probe.create_associated_board() or Board(self)
Example #27
    def _instanciate_class(self, module, config):
        logger.debug(config)
        clazz = getattr(module, config.pop('class'))
        return clazz(**config)
Example #29
    def __init__(self,
                 probe,
                 auto_open=True,
                 options=None,
                 option_defaults=None,
                 **kwargs):
        """! @brief Session constructor.
        
        Creates a new session using the provided debug probe. User options are merged from the
        _options_ parameter and any keyword arguments. Normally a board instance is created that can
        either be a generic board or a board associated with the debug probe.
        
        Precedence for user options:
        1. Keyword arguments to constructor.
        2. _options_ parameter to constructor.
        3. Probe-specific options from a config file.
        4. General options from a config file.
        5. _option_defaults_ parameter to constructor.
        
        Note that the 'project_dir' and 'config' options must be set in either keyword arguments or
        the _options_ parameter.
        
        Passing in a _probe_ that is None is allowed. This is useful to create a session that operates
        only as a container for user options. In this case, the board instance is not created, so the
        #board attribute will be None. Such a Session cannot be opened.
        
        @param self
        @param probe The DebugProbe instance. May be None.
        @param auto_open Whether to automatically open the session when used as a context manager.
        @param options Optional user options dictionary.
        @param option_defaults Optional dictionary of user option values. This dictionary has the
            lowest priority in determining final user option values, and is intended to set new
            defaults for option if they are not set through any other method.
        @param kwargs User options passed as keyword arguments.
        """
        super(Session, self).__init__()

        Session._current_session = weakref.ref(self)

        self._probe = probe
        self._closed = True
        self._inited = False
        self._user_script_proxy = None
        self._delegate = None
        self._auto_open = auto_open
        self._options = OptionsManager()

        # Set this session on the probe, if we were given a probe.
        if probe is not None:
            probe.session = self

        # Update options.
        self._options.add_front(kwargs)
        self._options.add_back(options)

        # Init project directory.
        if self.options.get('project_dir') is None:
            self._project_dir = os.getcwd()
        else:
            self._project_dir = os.path.abspath(
                os.path.expanduser(self.options.get('project_dir')))
        LOG.debug("Project directory: %s", self.project_dir)

        # Apply common configuration settings from the config file.
        config = self._get_config()
        probesConfig = config.pop('probes', None)
        self._options.add_back(config)

        # Pick up any config file options for this board.
        if (probe is not None) and (probesConfig is not None):
            for uid, settings in probesConfig.items():
                if str(uid).lower() in probe.unique_id.lower():
                    LOG.info("Using config settings for probe %s" %
                             (probe.unique_id))
                    self._options.add_back(settings)

        # Merge in lowest priority options.
        self._options.add_back(option_defaults)

        # Logging config.
        self._configure_logging()

        # Bail early if we weren't provided a probe.
        if probe is None:
            self._board = None
            return

        # Ask the probe if it has an associated board, and if not then we create a generic one.
        self._board = probe.create_associated_board() \
                        or Board(self, self.options.get('target_override'))
Example #30
    def start(station_id, config, processes, is_daemon, home_dir, verbose):
        """ Start workers """

        if config:
            if verbose:
                click.secho("Loading configs from: {}".format(config),
                            fg="green")
            config = ConfigsLoader.load(config)
        else:
            if verbose:
                click.secho("Loading default configs", fg="green")
            config = Config.make_with({})

        # logging config
        log_config = config.pop("LOG_CONFIG", None)
        if log_config:
            logging.config.dictConfig(log_config)

        # repo config
        repo_config = config.pop("REPO_CONFIG", None)
        if repo_config:
            repo_cls = repo_config.get(
                "repo_cls",
                "tethys.core.regobjs.repositories.repo_mongo:MongodbRepository",
            )
            repo_params = repo_config.get("repo_params", [])

            if isinstance(repo_cls, str):
                repo_cls = import_object_source(repo_cls)

            if not issubclass(repo_cls, RepositoryBase):
                raise ValueError("Bad configs: unsupported 'repo_cls' type")

            if isinstance(repo_params, dict):
                repo = repo_cls(**repo_params)
            elif isinstance(repo_params, Iterable):
                repo = repo_cls(*repo_params)
            else:
                raise ValueError("Bad configs: unsupported 'repo_params' type")

            ZeroRegistrableObject.REPOSITORY = repo

            if verbose:
                click.secho("Repository was changed to {}".format(repo),
                            fg="green")

        # station config
        station_config = {
            str(key).lower(): value
            for key, value in config.get("STATION_CONFIG", {}).items()
        }

        station_args = Worker._get_station_kwargs(ZeroStation)
        unrecognized_args = station_config.keys() - set(station_args)
        if unrecognized_args:
            logging.warning("Unrecognized station arguments: %s",
                            ",".join(unrecognized_args))

        if processes:
            station_config["max_processes_count"] = int(processes)

        station_id = (station_id or config.pop("STATION_ID", None)
                      or ZeroStation.generate_id())

        station_obj = ZeroStation(**station_config, _id=station_id)
        station_obj.save()

        #  worker config
        home_dir = home_dir or config.pop("STATION_ID", ".tethys")
        pid_files_dir = os.path.join(home_dir, "pid_files")
        if home_dir not in [".", ".."] and not os.path.exists(home_dir):
            if verbose:
                click.secho("Creating home dir: {}".format(home_dir),
                            fg="green")
            os.makedirs(home_dir)

        if not os.path.exists(pid_files_dir):
            os.makedirs(pid_files_dir)

        pid_name = "{}.station.pid".format(station_id)
        pid_file_name = os.path.join(pid_files_dir, pid_name)
        pid_file = PIDLockFile(pid_file_name, threaded=True)

        if config.pop("IS_DAEMON", is_daemon):
            if verbose:
                click.secho("Starting station as daemon...", fg="green")

            with daemon.DaemonContext(working_directory=home_dir,
                                      pidfile=pid_file):
                station_obj.start()

        else:
            if verbose:
                click.secho("Creating pid file...", fg="green")

            with pid_file:

                if verbose:
                    click.secho("Starting station...", fg="green")

                station_obj.start()
Example #31
    def __init__(self, name, dist, _app_name, style, styles, logger, handler,
                 formatter, exceptions, **sections):
        super(Logger, self).__init__(name,
                                     dist,
                                     style=style,
                                     styles=styles,
                                     logger=logger,
                                     handler=handler,
                                     formatter=formatter,
                                     exceptions=exceptions,
                                     **sections)
        colorama.init(autoreset=True)

        colors = (styles.get(style) or STYLES.get(style)
                  or STYLES['nocolors']).copy()
        colors = {
            name: ''.join(COLORS.get(c.upper(), '') for c in color)
            for name, color in colors.items()
        }

        configurator = DictConfigurator()

        # Application logger
        # ------------------

        logger_name = 'nagare.application.' + _app_name
        log.set_logger(logger_name)

        logger['level'] = logger['level'] or 'ERROR'

        if handler['class']:
            handler['()'] = configurator.create_handler
        else:
            handler.setdefault('stream', 'ext://sys.stderr')

            if 'class' not in formatter:
                del handler['class']
                handler['()'] = lambda stream: ColorizingStreamHandler(
                    stream,
                    colorizer.GenericColorizer({
                        name: (color, COLORS['RESET_ALL'])
                        for name, color in colors.items()
                    }))

                formatter['class'] = 'chromalog.ColorizingFormatter'
            else:
                handler['class'] = 'logging.StreamHandler'

        loggers = {logger_name: dict(logger, handlers=[logger_name])}
        handlers = {logger_name: dict(handler, formatter=logger_name)}
        formatters = {logger_name: formatter}

        # Other loggers
        # -------------

        for name, config in sections.items():
            if name.startswith('logger_'):
                name = config.pop('qualname')
                if name.startswith('.'):
                    name = logger_name + name

                if name == 'root':
                    name = ''

                config['propagate'] = config.get('propagate', '1') == '1'

                handler = config.get('handlers')
                if handler:
                    config['handlers'] = handler.split(',')

                loggers[name] = config

            if name.startswith('handler_'):
                handlers[name[8:]] = config
                handlers[name[8:]]['()'] = configurator.create_handler

            if name.startswith('formatter_'):
                formatters[name[10:]] = config

        # Root logger
        # -----------

        root = loggers.get('', {})
        root.setdefault('level', 'INFO')

        if 'handlers' not in root:
            root['handlers'] = ['_root_handler']

            handlers['_root_handler'] = {
                'stream': 'ext://sys.stderr',
                'formatter': '_root_formatter',
                '()': lambda stream: ColorizingStreamHandler(
                    stream,
                    colorizer.GenericColorizer({
                        name: (color, COLORS['RESET_ALL'])
                        for name, color in colors.items()
                    }))
            }

            formatters['_root_formatter'] = {
                'class': 'chromalog.ColorizingFormatter',
                'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
            }

        loggers[''] = root

        logging_config = {
            'version': 1,
            'loggers': loggers,
            'handlers': handlers,
            'formatters': formatters
        }

        configurator.configure(logging_config)

        # Colorized exceptions
        # --------------------

        exception_logger = logging.getLogger('nagare.services.exceptions')
        if style and not exception_logger.handlers:
            handler = self.create_exception_handler(colors, **exceptions)
            exception_logger.addHandler(handler)
Example #32
    def __init__(self, probe, auto_open=True, options=None, option_defaults=None, **kwargs):
        """! @brief Session constructor.
        
        Creates a new session using the provided debug probe. User options are merged from the
        _options_ parameter and any keyword arguments. Normally a board instance is created that can
        either be a generic board or a board associated with the debug probe.
        
        Precedence for user options:
        1. Keyword arguments to constructor.
        2. _options_ parameter to constructor.
        3. Probe-specific options from a config file.
        4. General options from a config file.
        5. _option_defaults_ parameter to constructor.
        
        Note that the 'project_dir' and 'config' options must be set in either keyword arguments or
        the _options_ parameter.
        
        Passing in a _probe_ that is None is allowed. This is useful to create a session that operates
        only as a container for user options. In this case, the board instance is not created, so the
        #board attribute will be None. Such a Session cannot be opened.
        
        @param self
        @param probe The DebugProbe instance. May be None.
        @param auto_open Whether to automatically open the session when used as a context manager.
        @param options Optional user options dictionary.
        @param option_defaults Optional dictionary of user option values. This dictionary has the
            lowest priority in determining final user option values, and is intended to set new
            defaults for option if they are not set through any other method.
        @param kwargs User options passed as keyword arguments.
        """
        super(Session, self).__init__()
        
        Session._current_session = weakref.ref(self)
        
        self._probe = probe
        self._closed = True
        self._inited = False
        self._user_script_proxy = None
        self._delegate = None
        self._auto_open = auto_open
        self._options = OptionsManager()
        
        # Update options.
        self._options.add_front(kwargs)
        self._options.add_back(options)
        
        # Init project directory.
        if self.options.get('project_dir') is None:
            self._project_dir = os.getcwd()
        else:
            self._project_dir = os.path.abspath(os.path.expanduser(self.options.get('project_dir')))
        LOG.debug("Project directory: %s", self.project_dir)
            
        # Apply common configuration settings from the config file.
        config = self._get_config()
        probesConfig = config.pop('probes', None)
        self._options.add_back(config)

        # Pick up any config file options for this board.
        if (probe is not None) and (probesConfig is not None):
            for uid, settings in probesConfig.items():
                if str(uid).lower() in probe.unique_id.lower():
                    LOG.info("Using config settings for probe %s" % (probe.unique_id))
                    self._options.add_back(settings)
        
        # Merge in lowest priority options.
        self._options.add_back(option_defaults)
        
        # Logging config.
        self._configure_logging()
        
        # Bail early if we weren't provided a probe.
        if probe is None:
            self._board = None
            return
            
        # Ask the probe if it has an associated board, and if not then we create a generic one.
        self._board = probe.create_associated_board(self) \
                        or Board(self, self.options.get('target_override'))