Example #1
def get_config(config_file):
    """Parse agent config params in specified yaml file and return as Python dict"""
    with open(config_file) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
        # if missing in the config, use following defaults
        config.setdefault('shuffle_nodes', False)
    return config
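All of the examples on this page lean on the same dict.setdefault contract: the key is written only when it is absent, and the stored value is returned either way. A minimal illustration (the 'shuffle_nodes' key comes from the snippet above; 'episode_length' is just a made-up second key):

config = {'shuffle_nodes': True}           # value that came from the YAML file
config.setdefault('shuffle_nodes', False)  # key exists, so the value is kept
config.setdefault('episode_length', 100)   # hypothetical missing key, default added
assert config == {'shuffle_nodes': True, 'episode_length': 100}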
Example #2
def init_logging():
    logger_conf_path = os.path.realpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "logconf.yaml"))
    with open(logger_conf_path) as config_file:
        config = yaml.safe_load(config_file)  # yaml.load() without an explicit Loader is deprecated/removed in newer PyYAML
        config.setdefault('version', 1)
        logging.config.dictConfig(config)
def get_app(config=None):
    """App factory.

    :param config: configuration that can override config from `settings.py`
    :return: a new SuperdeskEve app instance
    """
    if config is None:
        config = {}

    config['APP_ABSPATH'] = os.path.abspath(os.path.dirname(__file__))

    for key in dir(settings):
        if key.isupper():
            config.setdefault(key, getattr(settings, key))

    media_storage = None
    if config['AMAZON_CONTAINER_NAME']:
        from superdesk.storage.amazon.amazon_media_storage import AmazonMediaStorage
        media_storage = AmazonMediaStorage

    config['DOMAIN'] = {}

    app = superdesk_app(config, media_storage)
    configure_logging(config['LOG_CONFIG_FILE'])
    return app
Example #4
def setup_logging(config):
    """
    Takes a logging configuration and ensures the root logger has a
    handler named "default", which can be referenced in the logging
    configuration.  Most loggers will defer to this default handler
    implicitly, by way of propagation.  The augmented configuration
    is then handed off to logging.config.dictConfig().

    This function should only be called once at program startup.

    :param config: Logging configuration dictionary
    :type config: dict
    """
    formatters = config.setdefault('formatters', {})
    if 'default' not in formatters:
        formatters['default'] = {
            'format': DEFAULT_FORMAT
        }

    handlers = config.setdefault('handlers', {})
    if 'default' not in handlers:
        handlers['default'] = {
            'class': 'logging.StreamHandler',
            'formatter': 'default'
        }

    root_logger = config.setdefault('root', {})
    root_handlers = root_logger.setdefault('handlers', [])
    if 'default' not in root_handlers:
        root_handlers.append('default')

    logging.config.dictConfig(config)
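A minimal usage sketch for setup_logging(), assuming it and DEFAULT_FORMAT come from the module shown above. Note that dictConfig() still requires 'version' from the caller; the function only injects the 'default' formatter/handler and binds it to the root logger:

import logging

# 'version' must be supplied by the caller; the root level is set here so the
# info() call below is actually emitted through the injected 'default' handler.
config = {'version': 1, 'root': {'level': 'INFO'}}
setup_logging(config)
logging.getLogger(__name__).info('emitted through the injected default handler')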
Example #5
def get_app(config=None, init_elastic=False):
    """App factory.

    :param config: configuration that can override config from `settings.py`
    :return: a new SuperdeskEve app instance
    """
    if config is None:
        config = {}

    config['APP_ABSPATH'] = os.path.abspath(os.path.dirname(__file__))

    for key in dir(settings):
        if key.isupper():
            config.setdefault(key, getattr(settings, key))

    media_storage = None
    if config['AMAZON_CONTAINER_NAME']:
        from superdesk.storage.amazon.amazon_media_storage import AmazonMediaStorage
        media_storage = AmazonMediaStorage

    config['DOMAIN'] = {}

    app = superdesk_app(config, media_storage, init_elastic=init_elastic)
    configure_logging(config['LOG_CONFIG_FILE'])
    return app
Example #6
def init_logging():
    logger_conf_path = os.path.realpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "..",
                     "logconf.yaml"))
    with open(logger_conf_path) as config_file:
        config = yaml.safe_load(config_file)  # yaml.load() without an explicit Loader is deprecated/removed in newer PyYAML
        config.setdefault('version', 1)
        logging.config.dictConfig(config)
Example #7
def read_config():
    if not os.path.exists(CONFIG_FILENAME):
        init()
    cp = configparser.ConfigParser()
    cp.read(CONFIG_FILENAME)
    config = dict(cp['ciqw'])
    for k, v in DEFAULT_CONFIG.items():
        config.setdefault(k, v)
    return config
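One detail worth keeping in mind with read_config(): dict(cp['ciqw']) yields string values only, so file-supplied settings stay as text while non-string defaults from DEFAULT_CONFIG pass through unchanged. A small illustration with assumed contents (CONFIG_FILENAME, DEFAULT_CONFIG and init() live elsewhere in the module; the values here are hypothetical):

import configparser

DEFAULT_CONFIG = {'retries': 3, 'timeout': 30}    # hypothetical defaults

cp = configparser.ConfigParser()
cp.read_string('[ciqw]\ntimeout = 60\n')          # stands in for cp.read(CONFIG_FILENAME)
config = dict(cp['ciqw'])
for k, v in DEFAULT_CONFIG.items():
    config.setdefault(k, v)

assert config == {'timeout': '60', 'retries': 3}  # note the string '60'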
Example #8
    def process_user_args(user_args):
        def get_opt_val(opt_name, cfg_dict, default=False):
            default_val = 'not_found' if default else None
            opt_val = cfg_dict.get(opt_name, 'not_found')
            if opt_val == 'not_found' and args.get('subcommand'):
                opt_val = cfg_dict.get(args['subcommand'],
                                       {}).get(opt_name, default_val)
            return opt_val

        for arg_name, arg_val in list(user_args.items()):
            """
            if arg_name in ignored_args:
                continue
            """
            if isinstance(arg_val, dict):
                if args['subcommand'] == arg_name:
                    process_user_args(arg_val)
                    del config[args['subcommand']]
                else:
                    del config[arg_name]
                continue
            arg_val = get_opt_val(arg_name, user_args)
            default_val = get_opt_val(arg_name, default_config, default=True)
            if arg_val is not None:
                if arg_val != default_val:
                    # User specified a value in the command-line/config file
                    config[arg_name] = arg_val
                    if default_val == 'not_found':
                        results.args_not_found_in_config.append(
                            (arg_name, default_val, arg_val))
                    else:
                        results.default_args_overridden.append(
                            (arg_name, default_val, arg_val))
                else:
                    # User didn't change the config value (same as default one)
                    # TODO: factorize
                    if config.get(arg_name, 'not_found') != 'not_found':
                        config[arg_name] = arg_val
                    else:
                        config.setdefault(arg_name, arg_val)
            else:
                if default_val != 'not_found':
                    if config.get(arg_name, 'not_found') != 'not_found':
                        config[arg_name] = default_val
                    else:
                        config.setdefault(arg_name, default_val)
                else:
                    # import ipdb
                    # ipdb.set_trace()
                    raise AttributeError("No value could be found for the "
                                         f"argument '{arg_name}'")
Example #9
def update_config(path_or_json=None,
                  incremental=True,
                  listen_port=logging.config.DEFAULT_LOGGING_CONFIG_PORT):
    config = _get_config(path_or_json)

    # Don't overwrite full config by default
    config.setdefault('incremental', incremental)

    data_to_send = json.dumps(config).encode('utf-8')  # socket.send() needs bytes

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(('localhost', listen_port))
    s.send(struct.pack('>L', len(data_to_send)))
    s.send(data_to_send)
    s.close()
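For context, the wire format used here (a big-endian 4-byte length prefix followed by the serialized config) is what logging.config.listen() reads on the receiving side; on recent Python versions the listener hands JSON payloads to dictConfig(). A minimal sketch of a process that would accept these updates, using only the standard library:

import logging.config

listener = logging.config.listen(logging.config.DEFAULT_LOGGING_CONFIG_PORT)
listener.start()
try:
    pass  # run the application; update_config() can now push new configs to it
finally:
    logging.config.stopListening()
    listener.join()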
Example #10
def configure_logging(config):
    config = config.copy()
    if config.pop('http_debug', False):
        http.client.HTTPConnection.debuglevel = 1
    else:
        http.client.HTTPConnection.debuglevel = 0

    if config.get('coloredlogs'):
        conf = config.pop('coloredlogs').copy()
        conf['field_styles'] = dict_merge(coloredlogs.DEFAULT_FIELD_STYLES,
                                          conf.get('field_styles', {}))
        conf['level_styles'] = dict_merge(coloredlogs.DEFAULT_LEVEL_STYLES,
                                          conf.pop('level_styles', {}))
        coloredlogs.install(**conf)
    else:
        del config['coloredlogs']  # in case 'coloredlogs': null or {}

    config.setdefault('version', 1)
    logging.config.dictConfig(config)
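This snippet relies on a dict_merge helper that is not shown. A minimal sketch of what it could look like, assuming values from the second argument win and nested dicts are merged recursively (which is all the coloredlogs style dicts above need):

def dict_merge(base, override):
    # Return a new dict with `override` recursively merged into `base`.
    merged = dict(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = dict_merge(merged[key], value)
        else:
            merged[key] = value
    return merged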
Example #11
def get_app(config=None):
    """App factory.

    :param config: configuration that can override config from `settings.py`
    :return: a new SuperdeskEve app instance
    """
    if config is None:
        config = {}

    config['APP_ABSPATH'] = os.path.abspath(os.path.dirname(__file__))

    for key in dir(settings):
        if key.isupper():
            config.setdefault(key, getattr(settings, key))

    config['DOMAIN'] = {}

    app = superdesk_app(config)
    configure_logging(config['LOG_CONFIG_FILE'])
    return app
Example #12
def get_app(config=None):
    """App factory.

    :param config: configuration that can override config from `settings.py`
    :return: a new SuperdeskEve app instance
    """
    if config is None:
        config = {}

    config['APP_ABSPATH'] = os.path.abspath(os.path.dirname(__file__))

    for key in dir(settings):
        if key.isupper():
            config.setdefault(key, getattr(settings, key))

    config['DOMAIN'] = {}

    app = superdesk_app(config)
    configure_logging(config['LOG_CONFIG_FILE'])
    return app
Example #13
def _add_config_defauts(config):
    config.setdefault('version', 1)
    config.setdefault('disable_existing_loggers', False)
    config.setdefault('formatters', {
        'standard': {
            'format': CONFIG_FORMATTER
        },
    })
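Because setdefault() operates on the whole 'formatters' key, the 'standard' formatter above is only added when the caller supplies no formatters at all; an existing formatters section is left untouched rather than merged (also note the default dict is still built either way, so CONFIG_FORMATTER must be defined). A small illustration:

config = {'formatters': {'terse': {'format': '%(message)s'}}}
_add_config_defauts(config)
assert config['version'] == 1
assert 'standard' not in config['formatters']   # caller-provided section wins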
Example #14
def _get_config(path_or_dict=None):
    """ Loads a yaml/json file, or python dict """

    if path_or_dict is None:
        path_or_dict = os.environ.get('LOG_CONFIG', None)

    config = {}
    if isinstance(path_or_dict, dict):
        config = path_or_dict

    elif path_or_dict is None:
        # check for None before os.path.exists(), which rejects None
        LOG.info('Using default logging config')

    elif os.path.exists(path_or_dict):
        with open(path_or_dict) as f:
            data = f.read()

            # Can load both yaml and json files
            config = yaml.safe_load(data)

    else:
        raise ValueError('Config could not be loaded: %s' % path_or_dict)

    # always need a default formatter, handler and logger
    config.setdefault('formatters', {})
    config.setdefault('handlers', {})
    config.setdefault('loggers', {})

    default_log_format = config.pop('default_log_format', DEFAULT_LOG_FORMAT)
    default_log_level = config.pop('default_log_level', 'DEBUG')

    default_formatter = {
        'format': default_log_format,
        'class': 'colored_formatter.ColoredFormatter',
        'style': '{',
    }

    default_handler = {
        'level': 'DEBUG',
        'class': 'logging.StreamHandler',
        'formatter': 'default',
    }

    default_handlers = [
        handler for handler in config['handlers'].keys()
        if config['handlers'][handler] is not None
    ]

    default_logger = {'level': default_log_level, 'handlers': default_handlers}

    config['formatters'].setdefault('default', default_formatter)

    # logging to console can be disabled by setting the console handler to None
    config['handlers'].setdefault('console', default_handler)

    # set the global root logger config
    config['loggers'].setdefault('', default_logger)

    return config
Example #15
def configure_logging(daemon=False, daemon_log_file=None):
    """
    Setup the logging by retrieving the LOGGING dictionary from aiida and passing it to
    the python module logging.config.dictConfig. If the logging needs to be setup for the
    daemon running a task for one of the celery workers, set the argument 'daemon' to True
    and specify the path to the log file. This will cause a 'daemon_handler' to be added
    to all the configured loggers, that is a RotatingFileHandler that writes to the log file.

    :param daemon: configure the logging for a daemon task by adding a file handler instead
        of the default 'console' StreamHandler
    :param daemon_log_file: absolute filepath of the log file for the RotatingFileHandler
    """
    config = deepcopy(LOGGING)
    daemon_handler_name = 'daemon_log_file'

    # Add the daemon file handler to all loggers if daemon=True
    if daemon is True:

        if daemon_log_file is None:
            raise ValueError(
                'daemon_log_file has to be defined when configuring for the daemon'
            )

        config.setdefault('handlers', {})
        config['handlers'][daemon_handler_name] = {
            'level': 'DEBUG',
            'formatter': 'halfverbose',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': daemon_log_file,
            'encoding': 'utf8',
            'maxBytes': 100000,
        }

        for name, logger in config.get('loggers', {}).items():
            logger.setdefault('handlers', []).append(daemon_handler_name)

    logging.config.dictConfig(config)
Example #16
def configure_logging(config):
    """One-time global logging configuration.

    Set up logging as described in ``config``.  ``config`` should be a
    top-level configuration dictionary with a key ``logging``, and the
    value of that key is used as a configuration dictionary for
    :mod:`logging.config`.  If there is no ``logging`` key but there
    is a ``version: 1`` key/value, ``config`` is used directly as the
    configuration.  Otherwise a minimal default configuration is used.

    If the configuration does not define any handlers, then a default
    console log handler will be created and bound to the root logger.
    This will use a formatter named ``fixed``, defining it if
    necessary.

    :param dict config: :mod:`logging.config` setup dictionary

    .. deprecated:: 0.4.0
        Pass :mod:`dblogger` to :func:`yakonfig.parse_args` instead.

    """
    # find the actual logging config dictionary
    if 'logging' in config:
        config = config['logging']
        config.setdefault('version', 1)
    elif config.get('version') == 1:
        config = config
    else:
        config = { 'version': 1 }

    # create default handler if required
    if len(config.setdefault('handlers', {})) == 0:
        config.setdefault('formatters', {})
        if 'fixed' not in config['formatters']:
            config['formatters']['fixed'] = {
                '()': 'dblogger.FixedWidthFormatter',
                'format': ('%(asctime)-23s pid=%(process)-5d '
                           '%(fixed_width_filename_lineno)s '
                           '%(fixed_width_levelname)s %(message)s'),
            }
        config['handlers']['console'] = {
            'class': 'logging.StreamHandler',
            'formatter': 'fixed',
        }
        config.setdefault('root', {})
        config['root']['handlers'] = ['console']
    
    # also, we must set this magic flag, or any logger created at the
    # module level will stop working
    config['disable_existing_loggers'] = False

    logging.config.dictConfig(config)
def load_config():
    config = {}

    try:
        f = open(args.config_file).read()
        config = yaml.safe_load(f)
    except IOError:
        print("Unable to read config file {0}".format(args.config_file))
        sys.exit(2)
    except (yaml.reader.ReaderError, yaml.parser.ParserError):
        print("Invalid YAML syntax in config file {0}".format(
            args.config_file))
        sys.exit(2)
    except:
        raise

    config.setdefault("couchbase_host", "localhost")
    config.setdefault("couchbase_admin_port", 8091)
    config.setdefault("couchbase_admin_port_ssl", 18091)
    config.setdefault("couchbase_query_port", 8093)
    config.setdefault("couchbase_query_port_ssl", 18093)
    config.setdefault("couchbase_fts_port", 8094)
    config.setdefault("couchbase_fts_port_ssl", 18094)
    config.setdefault("couchbase_ssl", True)
    config.setdefault("nagios_nsca_path", "/sbin/send_nsca")
    config.setdefault("service_include_cluster_name", False)
    config.setdefault("service_include_label", False)
    config.setdefault("send_metrics", True)
    config.setdefault("dump_services", False)
    config.setdefault("all_nodes", False)

    if args.all_nodes:
        config["all_nodes"] = True

    if args.dump_services:
        config["dump_services"] = True

    if args.no_metrics:
        config["send_metrics"] = False

    if args.couchbase_host:
        config["couchbase_host"] = args.couchbase_host

    if args.monitor_host:
        config["monitor_host"] = args.monitor_host

    if args.monitor_type:
        config["monitor_type"] = args.monitor_type

    if args.couchbase_user:
        config["couchbase_user"] = args.couchbase_user

    if args.couchbase_password:
        config["couchbase_password"] = args.couchbase_password

    if args.verbose:
        config["logging"]["handlers"]["console"]["level"] = "DEBUG"

    logging.config.dictConfig(config["logging"])

    if config["couchbase_ssl"] is True:
        config["couchbase_admin_port"] = config["couchbase_admin_port_ssl"]
        config["couchbase_query_port"] = config["couchbase_query_port_ssl"]
        config["couchbase_fts_port"] = config["couchbase_fts_port_ssl"]

    # Unrecoverable errors
    for item in [
            "couchbase_user", "couchbase_password", "monitor_type",
            "monitor_host", "monitor_port", "node", "data"
    ]:
        if item not in config:
            print("{0} is not set in {1}".format(item, args.config_file))
            sys.exit(2)

    for item in config["data"]:
        if "bucket" not in item or item["bucket"] is None:
            print("Bucket name is not set in {0}".format(args.config_file))
            sys.exit(2)

        if "metrics" not in item or item["metrics"] is None:
            print("Metrics are not set for bucket {0} in {1}".format(
                item["bucket"], args.config_file))
            sys.exit(2)

    return config
Example #18
                level = logging.getLevelName(level)
            level -= verbosity * 10
            # adjust to be in bounds
            level = min(level, logging.CRITICAL)
            level = max(level, logging.NOTSET)
            # bring back to a string
            try:
                level = logging.getLevelName(level)
            except KeyError:
                pass
            # stash
            config['handlers']['console']['level'] = level

    # Re-bind console logger if required
    if verbosity > 0:
        config.setdefault('root', {})
        config['root'].setdefault('handlers', [])
        if 'console' not in config['root']['handlers']:
            config['root']['handlers'].append('console')

    # Enable debug logging
    config.setdefault('loggers', {})
    for logger in config.get('debug', []):
        config['loggers'].setdefault(logger, {})
        config['loggers'][logger].setdefault('handlers', [])
        config['loggers'][logger]['handlers'].append('debug')

    # We've used these options and integrated them into other things,
    # so reset them
    for option in ('verbose', 'quiet', 'debug'):
        config[option] = default_config[option]
Example #19
def main(cli_args=None):
    if cli_args is not None:
        args = parser.parse_args(cli_args)
    else:
        args = parser.parse_args()

    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'default': {
                'format': '%(levelname)s: %(message)s'
            },
        },
        'handlers': {
            'default': {
                'level': args.log_level,
                'class': 'logging.StreamHandler',
                'formatter': 'default',
            },
        },
        'loggers': {
            '': {
                'handlers': ['default'],
                'level': args.log_level,
                'propagate': True,
            }
        }
    })

    if args.config is not None:
        config = load_config(args.config)
    elif path.exists(path.expanduser(DEFAULT_CONFIG)):
        config = load_config(DEFAULT_CONFIG)
    elif path.exists(OLD_DEFAULT_CONFIG):
        config = load_config(OLD_DEFAULT_CONFIG)
    elif path.exists(path.expanduser(SITE_DEFAULT_CONFIG)):
        config = load_config(SITE_DEFAULT_CONFIG)
    else:  # get default config and print message about missing file
        config = load_config()

    if 'NTFY_BACKENDS' in environ:
        config['backends'] = environ['NTFY_BACKENDS'].split(',')

    if args.backend:
        config['backends'] = args.backend

    if args.option is None:
        args.option = {}
    for backend, backend_options in args.option.items():
        if backend is not None:
            config.setdefault(backend, {}).update(backend_options)

    if getattr(args, 'func', None) == run_cmd and args.longer_than is None and\
            'longer_than' in config:
        args.longer_than = config['longer_than']

    if getattr(args, 'func', None) == run_cmd and 'hide_command' in config:
        args.hide_command = config['hide_command']

    if hasattr(args, 'func'):
        message, retcode = args.func(args)
        if message is None:
            return 0
        if emojize is not None and not args.no_emoji:
            message = emojize(message, use_aliases=True)
        return notify(
            message,
            args.title,
            config,
            retcode=retcode,
            **dict(args.option.get(None, [])))
    else:
        parser.print_help()
def main():
    '''Main(). Commandline parsing and stalker startup.'''

    parser = argparse.ArgumentParser()

    parser.add_argument("-p",
                        "--posttroll_port",
                        dest="posttroll_port",
                        help="Local port where messages are published")
    parser.add_argument("-t",
                        "--topic",
                        dest="topic",
                        help="Topic of the sent messages")
    parser.add_argument("-c",
                        "--configuration_file",
                        help="Name of the config.ini configuration file")
    parser.add_argument("-C",
                        "--config_item",
                        help="Name of the configuration item to use")
    parser.add_argument("-e",
                        "--event_names",
                        help="Name of the events to monitor")
    parser.add_argument("-f",
                        "--filepattern",
                        help="Filepath pattern used to parse "
                        "satellite/orbit/date/etc information")
    parser.add_argument("-i",
                        "--instrument",
                        help="Instrument name in the satellite")

    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit()
    else:
        args = parser.parse_args()

    # Parse commandline arguments.  If args are given, they override
    # the configuration file.

    args_dict = vars(args)
    args_dict = {
        k: args_dict[k]
        for k in args_dict if args_dict[k] is not None
    }

    config = {}

    if args.configuration_file is not None:
        config_fname = args.configuration_file

        if "template" in config_fname:
            print("Template file given as trollstalker logging config,"
                  " aborting!")
            sys.exit()

        cparser = RawConfigParser()
        cparser.read(config_fname)
        config = dict(cparser.items(args.config_item, vars=args_dict))

    config.update(args_dict)

    config.update(
        {k: config[k].split(",")
         for k in config if "," in config[k]})

    config.setdefault("posttroll_port", "0")

    try:
        log_config = config["stalker_log_config"]
    except KeyError:
        try:
            loglevel = getattr(logging, config.get("loglevel", "DEBUG"))
            if loglevel == "":
                raise AttributeError
        except AttributeError:
            loglevel = logging.DEBUG

        logger.setLevel(loglevel)
        rootlogger = logging.getLogger("")
        rootlogger.setLevel(loglevel)
        strhndl = logging.StreamHandler()
        strhndl.setLevel(loglevel)
        log_format = "[%(asctime)s %(levelname)-8s %(name)s] %(message)s"
        formatter = logging.Formatter(log_format)

        strhndl.setFormatter(formatter)
        rootlogger.addHandler(strhndl)
    else:
        logging.config.fileConfig(log_config)

    logger.debug("Logger started")

    # Start watching for new files
    notifier = FilePublisher(config)
    notifier.start()

    try:
        while True:
            time.sleep(6000000)
    except KeyboardInterrupt:
        logger.info("Interrupting TrollStalker")
    finally:
        notifier.stop()
Example #21
def main(cli_args=None):
    if cli_args is not None:
        args = parser.parse_args(cli_args)
    else:
        args = parser.parse_args()

    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'default': {
                'format': '%(levelname)s: %(message)s'
            },
        },
        'handlers': {
            'default': {
                'level': args.log_level,
                'class': 'logging.StreamHandler',
                'formatter': 'default',
            },
        },
        'loggers': {
            '': {
                'handlers': ['default'],
                'level': args.log_level,
                'propagate': True,
            }
        }
    })

    if args.config is not None:
        config = load_config(args.config)
    elif path.exists(path.expanduser(DEFAULT_CONFIG)):
        config = load_config(DEFAULT_CONFIG)
    elif path.exists(path.expanduser(OLD_DEFAULT_CONFIG)):
        config = load_config(OLD_DEFAULT_CONFIG)
    else:  # get default config and print message about missing file
        config = load_config()

    if args.backend:
        config['backends'] = args.backend

    for backend, backend_options in args.option.items():
        if backend is not None:
            config.setdefault(backend, {}).update(backend_options)


    if getattr(args, 'func', None) == run_cmd and args.longer_than is None and \
            'longer_than' in config:
        args.longer_than = config['longer_than']

    if args.title is None:
        args.title = config.get('title', default_title)

    if hasattr(args, 'func'):
        message = args.func(args)
        if message is None:
            return 0
        if emojize is not None and not args.no_emoji:
            message = emojize(message, use_aliases=True)
        return notify(message, args.title, config,
                      **dict(args.option.get(None, [])))
    else:
        parser.print_help()
Example #22
def get_config(config_file):
    """Parse agent config params in specified yaml file and return as Python dict"""
    with open(config_file) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
        # if missing in the config, use following defaults
        config.setdefault('shuffle_nodes', False)
        config.setdefault('observation_space', ['ingress_traffic'])
        # safety checks
        assert 'objective' in config and config['objective'] in SUPPORTED_OBJECTIVES, \
            f"Objective {config.get('objective')} not recognized. Must be one of {SUPPORTED_OBJECTIVES}, " \
            f"recommended default: 'prio-flow'."
        if config['objective'] == 'prio-flow':
            assert 'target_success' in config and \
                   (config['target_success'] == 'auto' or 0 <= config['target_success'] <= 1)
        if config['objective'] in {'soft-deadline', 'soft-deadline-exp'}:
            assert 'soft_deadline' in config
            if config['objective'] == 'soft-deadline-exp':
                assert 'dropoff' in config and config['dropoff'] > 0, "Use 'soft-deadline' objective for 0 dropoff."
        if config['objective'] == 'weighted':
            for weight in ['flow_weight', 'delay_weight', 'node_weight', 'instance_weight']:
                if weight not in config:
                    logger.warning(f"Using weighted objective, but {weight} not configured. Defaulting to {weight}=0.")
        config.setdefault('target_success', None)
        config.setdefault('soft_deadline', None)
        config.setdefault('dropoff', None)
        for weight in ['flow_weight', 'delay_weight', 'node_weight', 'instance_weight']:
            config.setdefault(weight, 0)
    return config
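For reference, a config dict that would satisfy the checks above, assuming 'prio-flow' is one of the SUPPORTED_OBJECTIVES as the error message recommends (values are illustrative only):

example_config = {
    'objective': 'prio-flow',   # must be in SUPPORTED_OBJECTIVES
    'target_success': 'auto',   # or any float between 0 and 1
    # everything else ('shuffle_nodes', 'observation_space', the weights, ...)
    # is filled in with defaults by get_config() via setdefault()
}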
Example #23
    def _read_config(self, configfile, defaults=None):
        """Read a configuration file from disk.

        :param configfile: the path to a configuration file
        :param defaults:   default configuration values as a dictionary

        :returns: configuration values as a dictionary
        """

        # You can't have a dictionary as a default argument for a method:
        # http://pythonconquerstheuniverse.wordpress.com/category/
        #     python-gotchas/
        if not defaults:
            defaults = {}

        cp = ConfigParser(defaults=defaults)

        if not os.path.exists(configfile):
            raise exceptions.FileNotFound('Configuration file %s is missing' %
                                          configfile)

        cp.read(configfile)
        if not cp.sections():
            raise exceptions.NoConfigFound(
                'The configuration file %s appears to contain no configuration'
                % configfile)

        config = self.config = dict([[s, dict(cp.items(s))]
                                     for s in cp.sections()])
        config.setdefault('main', {})['hostidfile'] = '/etc/opencenter/hostid'
        config_section = self.config_section

        if config_section in config:
            if 'include' in config[config_section]:
                # import and merge a single file
                if not os.path.isfile(config[config_section]['include']):
                    raise RuntimeError(
                        'file %s: include directive %s is not a file' %
                        (configfile, config[config_section]['include']))
                config = self.config = self._read_config(
                    config[config_section]['include'], defaults=config)

            if 'include_dir' in config[config_section]:
                # import and merge a whole directory
                if not os.path.isdir(config[config_section]['include_dir']):
                    raise RuntimeError(
                        'file %s: include_dir directive %s is not a directory'
                        % (configfile, config[config_section]['include_dir']))

                for f in sorted(
                        os.listdir(config[config_section]['include_dir'])):
                    if not f.endswith('.conf'):
                        self.logger.info('Skipping file %s because it does '
                                         'not end in .conf' % f)
                    else:
                        import_file = os.path.join(
                            config[config_section]['include_dir'], f)
                        config = self.config = self._read_config(
                            import_file, defaults=config)

        # merge the read config into the existing config
        for section in config:
            if section in defaults:
                defaults[section].update(config[section])
            else:
                defaults[section] = config[section]

        # pass logging config off to logger
        return defaults
Example #24
import logging
import logging.config

import click
import yaml

from rfexplorer import RFExplorer

# TODO: look up implementing a repl that can perform lots of functions without
# having to run the cli multiple times
# would be usful for quickly searching channels to see if they're in use.


with open('logging.yaml') as f:
    config = yaml.safe_load(f)  # yaml.load() without an explicit Loader is deprecated/removed in newer PyYAML
    config.setdefault('version', 1)
    logging.config.dictConfig(config)


logger = logging.getLogger(__name__)


@click.command()
@click.option('-p', '--port', prompt='port of the RFExplorer',
              help='full path for unix, com# for windows')
@click.option('-fpv', '--fpv-scan', is_flag=True)
def main(port, fpv_scan):
    """Console script for rfexplorer_api"""

    try:
        logger.info('wat')
Example #25
    def _read_config(self, configfile, defaults=None):
        """Read a configuration file from disk.

        :param configfile: the path to a configuration file
        :param defaults:   default configuration values as a dictionary

        :returns: configuration values as a dictionary
        """

        # You can't have a dictionary as a default argument for a method:
        # http://pythonconquerstheuniverse.wordpress.com/category/
        #     python-gotchas/
        if not defaults:
            defaults = {}

        cp = ConfigParser(defaults=defaults)

        if not os.path.exists(configfile):
            raise exceptions.FileNotFound(
                'Configuration file %s is missing' % configfile)

        cp.read(configfile)
        if not cp.sections():
            raise exceptions.NoConfigFound(
                'The configuration file %s appears to contain no configuration'
                % configfile)

        config = self.config = dict([[s, dict(cp.items(s))]
                                     for s in cp.sections()])
        config.setdefault('main', {})['hostidfile'] = '/etc/opencenter/hostid'
        config_section = self.config_section

        if config_section in config:
            if 'include' in config[config_section]:
                # import and merge a single file
                if not os.path.isfile(config[config_section]['include']):
                    raise RuntimeError(
                        'file %s: include directive %s is not a file' % (
                            configfile,
                            config[config_section]['include']))
                config = self.config = self._read_config(
                    config[config_section]['include'], defaults=config)

            if 'include_dir' in config[config_section]:
                # import and merge a whole directory
                if not os.path.isdir(config[config_section]['include_dir']):
                    raise RuntimeError(
                        'file %s: include_dir directive %s is not a directory'
                        % (configfile,
                           config[config_section]['include_dir']))

                for f in sorted(os.listdir(
                        config[config_section]['include_dir'])):
                    if not f.endswith('.conf'):
                        self.logger.info('Skipping file %s because it does '
                                         'not end in .conf' % f)
                    else:
                        import_file = os.path.join(
                            config[config_section]['include_dir'],
                            f)
                        config = self.config = self._read_config(
                            import_file, defaults=config)

        # merge the read config into the existing config
        for section in config:
            if section in defaults:
                defaults[section].update(config[section])
            else:
                defaults[section] = config[section]

        # pass logging config off to logger
        return defaults
Example #26
def merge_config_before(sender, config):
    if data.get(EXTENDS_KEYWORD):
        extend_list = config.setdefault(EXTENDS_KEYWORD, [])
        extend_list += data[EXTENDS_KEYWORD]
def main():
    '''Main(). Commandline parsing and stalker startup.'''

    parser = argparse.ArgumentParser()

    parser.add_argument("-p", "--posttroll_port", dest="posttroll_port",
                        help="Local port where messages are published")
    parser.add_argument("-t", "--topic", dest="topic",
                        help="Topic of the sent messages")
    parser.add_argument("-c", "--configuration_file",
                        help="Name of the config.ini configuration file")
    parser.add_argument("-C", "--config_item",
                        help="Name of the configuration item to use")
    parser.add_argument("-e", "--event_names",
                        help="Name of the pyinotify events to monitor")
    parser.add_argument("-f", "--filepattern",
                        help="Filepath pattern used to parse "
                        "satellite/orbit/date/etc information")
    parser.add_argument("-i", "--instrument",
                        help="Instrument name in the satellite")

    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit()
    else:
        args = parser.parse_args()

    # Parse commandline arguments.  If args are given, they override
    # the configuration file.

    args_dict = vars(args)
    args_dict = {k: args_dict[k]
                 for k in args_dict if args_dict[k] is not None}

    config = {}

    if args.configuration_file is not None:
        config_fname = args.configuration_file

        if "template" in config_fname:
            print "Template file given as trollstalker logging config," \
                " aborting!"
            sys.exit()

        cparser = ConfigParser()
        cparser.read(config_fname)
        config = dict(cparser.items(args.config_item, vars=args_dict))

    config.update(args_dict)

    config.update({k: config[k].split(",")
                   for k in config if "," in config[k]})

    config.setdefault("posttroll_port", "0")

    try:
        log_config = config["stalker_log_config"]
    except KeyError:
        try:
            loglevel = getattr(logging, config.get("loglevel", "DEBUG"))
            if loglevel == "":
                raise AttributeError
        except AttributeError:
            loglevel = logging.DEBUG

        LOGGER.setLevel(loglevel)
        rootlogger = logging.getLogger("")
        rootlogger.setLevel(loglevel)
        strhndl = logging.StreamHandler()
        strhndl.setLevel(loglevel)
        log_format = "[%(asctime)s %(levelname)-8s %(name)s] %(message)s"
        formatter = logging.Formatter(log_format)

        strhndl.setFormatter(formatter)
        rootlogger.addHandler(strhndl)
    else:
        logging.config.fileConfig(log_config)

    LOGGER.debug("Logger started")

    # Start watching for new files
    notifier = FilePublisher(config)
    notifier.start()

    try:
        while True:
            time.sleep(6000000)
    except KeyboardInterrupt:
        LOGGER.info("Interrupting TrollStalker")
    finally:
        notifier.stop()
Example #28
    def create_loggers(
        self,
        log_dir: str,
        params: List[Dict[str, Any]],
        disable_existing_loggers: bool = False,
    ):
        """Create loggers from configuration"""
        if self.logger_config is None:
            config = {}
        else:
            config = self.logger_config

        config.setdefault("loggers", {})
        config.setdefault("handlers", {})
        config.setdefault("formatters", {})

        global_logger_name = "tvm.meta_schedule"
        global_logger = logging.getLogger(global_logger_name)
        if global_logger.level is logging.NOTSET:
            global_logger.setLevel(logging.INFO)

        config["loggers"].setdefault(
            global_logger_name,
            {
                "level":
                logging._levelToName[  # pylint: disable=protected-access
                    global_logger.level],
                "handlers":
                [handler.get_name() for handler in global_logger.handlers] + [
                    global_logger_name + ".console",
                    global_logger_name + ".file"
                ],
                "propagate":
                False,
            },
        )
        config["loggers"].setdefault(
            "{logger_name}",
            {
                "level": "INFO",
                "handlers": [
                    "{logger_name}.file",
                ],
                "propagate": False,
            },
        )
        config["handlers"].setdefault(
            global_logger_name + ".console",
            {
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stdout",
                "formatter": "tvm.meta_schedule.standard_formatter",
            },
        )
        config["handlers"].setdefault(
            global_logger_name + ".file",
            {
                "class": "logging.FileHandler",
                "filename": "{log_dir}/" + __name__ + ".task_scheduler.log",
                "mode": "a",
                "level": "INFO",
                "formatter": "tvm.meta_schedule.standard_formatter",
            },
        )
        config["handlers"].setdefault(
            "{logger_name}.file",
            {
                "class": "logging.FileHandler",
                "filename": "{log_dir}/{logger_name}.log",
                "mode": "a",
                "level": "INFO",
                "formatter": "tvm.meta_schedule.standard_formatter",
            },
        )
        config["formatters"].setdefault(
            "tvm.meta_schedule.standard_formatter",
            {
                "format": "%(asctime)s.%(msecs)03d %(levelname)s %(message)s",
                "datefmt": "%Y-%m-%d %H:%M:%S",
            },
        )

        # set up dictConfig loggers
        p_config = {
            "version": 1,
            "disable_existing_loggers": disable_existing_loggers
        }
        for k, v in config.items():
            if k in ["formatters", "handlers", "loggers"]:
                p_config[k] = batch_parameterize_config(v,
                                                        params)  # type: ignore
            else:
                p_config[k] = v
        logging.config.dictConfig(p_config)

        # check global logger
        if global_logger.level not in [logging.DEBUG, logging.INFO]:
            global_logger.warning(
                "Logging level set to %s, please set to logging.INFO"
                " or logging.DEBUG to view full log.",
                logging._levelToName[global_logger.level],  # pylint: disable=protected-access
            )
        global_logger.info("Logging directory: %s", log_dir)
Example #29
    def before_extend(self, sender, config):
        # raise
        if data.get(EXTENDS_KEYWORD):
            extend_list = config.setdefault(EXTENDS_KEYWORD, [])
            extend_list += data[EXTENDS_KEYWORD]