def setUp(self):
    config = {key: value for key, value in self.base_config.items()}
    config.update({
        'formatters': {
            'graylog': {
                '()': 'graylog_json_formatter.GrayLogJSONFormatter',
                'format': '({levelname}) | {name} | [{asctime}]: '
                          'File "{pathname}:{lineno}" - {funcName}() | '
                          '{message}',
                'style': '{',
                'source': 'test',
                'extra': lambda record: {'service': 'service-func'},
            }
        },
    })
    logging.config.dictConfig(config)
    self.logger = logging.getLogger('test')
def cfg(self):
    """Load the application configuration.

    This method loads configuration from a python module.
    """
    config = LStruct(self.defaults)
    module = config['CONFIG'] = os.environ.get(
        CONFIGURATION_ENVIRON_VARIABLE, config['CONFIG'])

    if module:
        try:
            module = import_module(module)
            config.update({
                name: getattr(module, name) for name in dir(module)
                if name == name.upper() and not name.startswith('_')
            })
        except ImportError as exc:
            config.CONFIG = None
            self.logger.error("Error importing %s: %s", module, exc)

    # Patch configuration from ENV
    for name in config:
        if name.startswith('_') or name != name.upper() or name not in os.environ:
            continue
        try:
            config[name] = json.loads(os.environ[name])
        except ValueError:
            pass

    return config
def load_config(settings_file='./test_settings.py'):
    """
    Load the config files, merging the defaults with the file defined
    in environ.PULLSBURY_SETTINGS if it exists.
    """
    config = Config(os.getcwd())
    if 'PULLSBURY_SETTINGS' in os.environ:
        config.from_envvar('PULLSBURY_SETTINGS')
    else:
        config.from_pyfile(settings_file)
    if config.get('LOGGING_CONFIG'):
        logging.config.fileConfig(
            config.get('LOGGING_CONFIG'),
            disable_existing_loggers=False)

    json_values = [
        'TEAMS',
        'HAPPY_SLACK_EMOJIS',
        'REPO_BLACKLIST',
        'SLACK_CUSTOM_EMOJI_MAPPING'
    ]
    for value in json_values:
        config.update({
            value: json.loads(config.get(value, '{}'))
        })
    return config
def parse_config(filename='config.yaml') -> Dict:
    s = _read_file(filename)
    config = _parse_config(s)
    log_conf = get_config(config)
    config.update(update_config(config))
    logging.config.dictConfig(log_conf)
    return Dict(config)
def __init__(self, args, options='', timestamp=True):
    # parse default and custom cli options
    for opt in options:
        args.add_argument(*opt.flags, default=None, type=opt.type)
    args = args.parse_args()

    if args.device:
        os.environ["CUDA_VISIBLE_DEVICES"] = args.device
    if args.resume is None:
        msg_no_cfg = "Configuration file needs to be specified. Add '-c config.json', for example."
        assert args.config is not None, msg_no_cfg
        self.cfg_fname = Path(args.config)
        config = read_json(self.cfg_fname)
        self.resume = None
    else:
        self.resume = Path(args.resume)
        resume_cfg_fname = self.resume.parent / 'config.json'
        config = read_json(resume_cfg_fname)
        if args.config is not None:
            config.update(read_json(Path(args.config)))

    # load config file and apply custom cli options
    self._config = _update_config(config, options, args)

    # set save_dir where trained model and log will be saved.
    # save_dir = Path(self.config['trainer']['save_dir'])
    # timestamp = datetime.now().strftime(r'%m%d_%H%M%S') if timestamp else ''
    exper_name = self.config['name']
def create(config):
    """http://flask.pocoo.org/docs/patterns/appfactories/"""
    app = Flask(__name__.split('.')[0])

    # Load YAML config into app.config
    if not isinstance(config, dict):
        with open(os.path.join(app.instance_path, config)) as f:
            config = yaml.safe_load(f)
    config.update({k.upper(): v for k, v in config["flask"].items()})
    app.config.update(config)
    del config

    # Configure logging
    logging.config.dictConfig(app.config["logging"])

    # Initialize database
    app.database = Database(app)

    # Load blueprints
    from .views.submit import submit
    app.register_blueprint(submit)
    if False and app.debug:
        app.logger.warning("Debug mode is on. Do not use this in production.")
        from .views.debug import debug
        app.register_blueprint(debug)

    return app
def load_config():
    src_root = realpath(dirname(__file__))
    project_root = realpath(dirname(src_root))
    filenames = [
        join(src_root, filename)
        for filename in ('config.yaml', 'local.yaml')
    ]
    config = {}
    for name in filenames:
        if not exists(name):
            continue
        with open(name) as f:
            data = yaml.safe_load(f)
        config = merge_dicts(config, data)
    config.update({
        'src_root': src_root,
        'tmp_root': join(project_root, 'tmp/'),
        'logs_root': join(project_root, 'logs/'),
    })
    config['webhook_url'] = 'https://{hostname}/{token}'.format(
        hostname=config['host'], token=config['telegram']['token'])
    config['evernote']['oauth_callback_url'] = 'https://{hostname}{path}'.format(
        hostname=config['host'], path=config['evernote']['oauth_path'])
    makedirs(config['logs_root'], exist_ok=True)
    makedirs(config['tmp_root'], exist_ok=True)
    logging_config = get_logging_config(config['logs_root'])
    logging.config.dictConfig(logging_config)
    return config
def __init__(self):
    # Config set up. Environment overrides app.yaml
    with open("cfg/app.yaml", "r") as f:
        config = yaml.safe_load(f)
    config.update(os.environ)

    # Logger set up
    self._init_logger(config["log_dir"])
    self._logger.info("Initializing the App")

    # Quik connector
    self._connector = WebQuikConnector(conn=config["conn"], passwd=config["passwd"],
                                       account=config["account"])

    # Feed2Csv just receives price and level2 for a single configured asset
    # and writes to the data folder
    web_quik_feed = WebQuikFeed(self._connector, rabbit_host=config["rabbit_host"])
    # self._feed = Feed2Csv(web_quik_feed, config.sec_class, config.sec_code)

    # Broker is not implemented, just a stub.
    web_quik_broker = WebQuikBroker(connector=self._connector,
                                    client_code=config["client_code"],
                                    trade_account=config["trade_account"],
                                    rabbit_host=config["rabbit_host"])

    # Create feed, subscribe events
    # Todo: support making orders
    self._strategy = Strategy(web_quik_feed, web_quik_broker,
                              config["sec_class"], config["sec_code"])
def read() -> None:
    """Read configuration into the global variable 'config'."""
    with open('disy.yml') as f:
        y = yaml.safe_load(f) or {}
    config.update(y.items())
def get_config(args):
    config = {}
    if args.config:
        with open(args.config, 'r') as f:
            config.update(yaml.safe_load(f.read()))
    return config
def update_config(overlay_dict):
    """Apply a partial config dict to the singleton config dict.

    Values for top-level keys will be overwritten with those in
    overlay_dict. No recursion will take place for sub-dicts; the
    entire sub-dict will be overwritten.
    """
    config.update(overlay_dict)
    new_config = config_schema.check_value(config)
    config.clear()
    config.update(new_config)
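# A minimal sketch of the shallow-merge behaviour described in the docstring
# above, using a plain dict in place of the module-level singletons (`config`
# and `config_schema` come from the snippet and are not defined here):
config = {"db": {"host": "localhost", "port": 5432}, "debug": False}
overlay = {"db": {"host": "example.org"}}
config.update(overlay)
# The whole "db" sub-dict is replaced, so "port" is gone:
assert config == {"db": {"host": "example.org"}, "debug": False}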
def parse_commandline(args=None, config=None):
    """Construct commandline parser and then parse arguments.

    :param args: Command line arguments to parse. If none are given then
        ``sys.argv`` is used by default.
    :param config: ``configparser.ConfigParser`` object that contains the
        initial configuration of the system. The default is to use
        ``load_configuration`` to get the default.
    :return: A namespace that contains attributes with values determined
        from ``config`` and then command line arguments.
    """
    if config is None:
        config = load_configuration()
    parser = argparse.ArgumentParser()
    parser.add_argument("--version", action="version",
                        version=f"%(prog)s {__version__}")
    parser.add_argument(
        "--verbose",
        type=str,
        default="warning",
        choices=["critical", "error", "warning", "info", "debug"],
        help="Change default logging verbosity.",
    )
    subparsers = parser.add_subparsers(help="sub-command help")
    init_subparser(subparsers)
    models_subparser(subparsers, config)
    upload_subparser(subparsers, config)
    train_subparser(subparsers, config)
    predict_subparser(subparsers, config)
    options = parser.parse_args(args=args)

    # Adjust root logger and console to match verbose
    rootlogger = logging.getLogger()
    rootlogger.setLevel(options.verbose.upper())
    rootconsole = [h for h in rootlogger.handlers if h.get_name() == "console"]
    if rootconsole:
        rootconsole = rootconsole[0]
        rootconsole.setLevel(options.verbose.upper())

    # Update configuration and set in options
    config.update(options)
    options.configuration = config

    if not hasattr(options, "func"):
        parser.print_help()
        sys.exit(1)

    if options.verbose == "debug":
        print("Working directory:", pathlib.Path.cwd(), file=sys.stderr)
    return options
def _post_init_config(params):
    '''Add additional parameters into config.'''
    logger.warning('params: %s', params)
    for (k, v) in params.items():
        if k in config:
            logger.warning('param will be overwritten: key: %s origin: %s new: %s',
                           k, config[k], v)
    config.update(params)
def make_from_config_list(cls, config_paths):
    """
    Initialize the App by using the configuration list.
    This function must be executed only once.

    :param config_paths:
    :return: App Class
    """
    config = Config.get_empty()
    for path in config_paths:
        config.update(config.from_yaml(path))
    return cls.instance(config, update=True)
def reload_config():
    """[Re]load the configuration.

    * [Re-]reads the identified config file and replaces the contents of
      the singleton config dict with it
    * Any config variables in os.environ will be overlaid onto the dict [again]
    * Normalization and setting of defaults will occur [again]
    """
    new_config = read_config_file(config_filename)
    _env_config(new_config)
    new_config = config_schema.check_value(new_config)
    config.clear()
    config.update(new_config)
def main(args):
    app_name = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
    config = load_config(app_name, 'credentials', 'site.conf', verbose=True,
                         s3_bucket=os.getenv("SETTINGS_BUCKET", None),
                         passphrase=os.getenv("SETTINGS_CRYPT_KEY", None))
    config.update({
        'SESSION_COOKIE_NAME': 'sessionid',
        'ALLOWED_NO_HOST': ['/static/']
    })
    app.config.update(config)
    settings.update(DEBUG=app.debug, APP_NAME=app_name)
    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'filters': {},
        'formatters': {
            'simple': {
                'format': 'X X %(levelname)s [%(asctime)s] %(message)s',
                'datefmt': '%d/%b/%Y:%H:%M:%S %z'
            },
        },
        'handlers': {
            'logfile': {
                'level': 'DEBUG',
                'formatter': 'simple',
                'filters': [],
                'class': 'logging.StreamHandler',
            },
        },
        'loggers': {
            'deployutils': {
                'handlers': [],
                'level': 'INFO',
            },
            # This is the root logger.
            # The level will only be taken into account if the record is not
            # propagated from a child logger.
            # https://docs.python.org/2/library/logging.html#logging.Logger.propagate
            '': {
                'handlers': ['logfile'],
                'level': 'INFO'
            },
        },
    })
    global session
    session = sessions.Session()
    session.init_app(app)
    app.run()
def reload(self):
    """Reload the configuration from disk, returning True if the
    configuration has changed from the previous values.
    """
    config = self._default_configuration()
    if self._file_path:
        config.update(self._load_config_file())
    if config != self._values:
        self._values = config
        return True
    return False
def _update_cli_config(dict_conf: Dict) -> None:
    """
    Update CLI config and write to yaml file.

    :param dict_conf: dict config to write.
    :return: None
    """
    config = _get_or_create_cli_config()
    config.update(dict_conf)
    with open(CLI_CONFIG_PATH, "w") as f:
        yaml.dump(config, f, default_flow_style=False)
def get_baboond_config():
    """Returns the baboond full dict configuration."""
    arg_attrs = get_config_args(PARSER)
    file_attrs = get_config_file(arg_attrs, 'baboondrc')
    init_config_log(arg_attrs, LOGGING)

    config = {}
    config.update(arg_attrs)
    config.update(file_attrs)

    return config
def supervisor(self, config):
    config.update({
        key: val for key, val in os.environ.items()
        if key.startswith("SUPERVISOR_")
    })
    rpc = supervisor.childutils.getRPCInterface(config)
    state = rpc.supervisor.getState().get("statename")
    if state != consts.RUNNING:
        raise ValueError("can not talk to supervisor server")
    self._supervisor = rpc.supervisor
    return self._supervisor
def configure(backend=backend.amqp_puka.AMQPHandler,
              # args=("amqp://*****:*****@localhost:5672/%2F", "lc-topic", "topic"),
              args=[],
              kwargs={}):
    '''
    Explicitly configure contexture with a handler. This will skip the
    logging-based configuration.
    '''
    global backend_handler
    backend_handler = backend(*args, **kwargs)
    config.update({k: v for k, v in kwargs.items() if k in config})
    logging.basicConfig()
def load_config(path=None):
    """Load configuration."""
    global config
    _path = abspath("." if path is None else path)
    try:
        with open(join(_path, "item_config.yaml")) as f:
            result = yaml.safe_load(f)
        config["_from_file"] = result
        config.update(result)
        config["_filename"] = "item_config.yaml"
    except FileNotFoundError:
        if path is not None:
            raise
def setup(cfg=None):
    if cfg and isinstance(cfg, dict):
        config.update(cfg)
    if os.path.exists('logger.conf'):
        logging.config.fileConfig('logger.conf', disable_existing_loggers=False)
    logger_filter = LoggerFilter()
    logging.getLogger().addFilter(logger_filter)
    for logger_name in list(logging.getLogger().manager.loggerDict.keys()):
        if logger_name.startswith('es.') or logger_name.startswith('tornado'):
            logging.getLogger(logger_name).addFilter(logger_filter)
@contextmanager
def new_config(new_config):
    """
    Temporarily change configuration dictionary.
    """
    orig_config = config.copy()
    try:
        config.clear()
        config.update(new_config)
        initialize_logging(config)
        yield
    finally:
        config.clear()
        config.update(orig_config)
        initialize_logging(config)
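# A minimal, self-contained sketch of the temporary-config pattern used
# above, with a plain dict instead of the snippet's module-level `config`
# and without the logging re-initialisation:
from contextlib import contextmanager

config = {"level": "INFO"}

@contextmanager
def temporary_config(new_values):
    # Save a copy, swap in the new values, and restore on exit.
    orig = config.copy()
    try:
        config.clear()
        config.update(new_values)
        yield
    finally:
        config.clear()
        config.update(orig)

with temporary_config({"level": "DEBUG"}):
    assert config == {"level": "DEBUG"}
assert config == {"level": "INFO"}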
def get_oxauth_config(self):
    req = self.backend.exec_query(
        "SELECT oxRevision, oxAuthConfDynamic, oxAuthConfWebKeys "
        "FROM `gluu` "
        "USE KEYS 'configuration_oxauth'",
    )
    if not req.ok:
        return {}

    config = req.json()["results"][0]
    if not config:
        return {}

    config.update({"id": "configuration_oxauth"})
    return config
def get_configuration(self):
    req = self.backend.exec_query(
        "SELECT oxTrustCacheRefreshServerIpAddress, gluuVdsCacheRefreshEnabled "
        "FROM `gluu` "
        "USE KEYS 'configuration'")
    if not req.ok:
        return {}

    config = req.json()["results"][0]
    if not config:
        return {}

    config.update({"id": "configuration"})
    return config
def clean_stores(self):
    fleet = self._config_params.get('fleet')
    robot_proxy_store_config = self._config_params.get("robot_proxy_store")
    robot_store_config = self._config_params.get("robot_store")
    store_configs = {'robot_proxy_store': robot_proxy_store_config,
                     'robot_store': robot_store_config}
    for robot_id in fleet:
        for store_name, config in store_configs.items():
            config.update({'db_name': store_name + '_' + robot_id.split('_')[1]})
            store = MongoStore(**config)
            self.clean_store(store)

    ccu_store_config = self._config_params.get('ccu_store')
    store = MongoStore(**ccu_store_config)
    self.clean_store(store)
def get_auth_config(self):
    bucket = os.environ.get("CN_COUCHBASE_BUCKET_PREFIX", "jans")
    req = self.client.exec_query(
        "SELECT jansRevision, jansConfDyn, jansConfWebKeys "
        f"FROM `{bucket}` "
        "USE KEYS 'configuration_jans-auth'",
    )
    if not req.ok:
        return {}

    config = req.json()["results"][0]
    if not config:
        return {}

    config.update({"id": "configuration_jans-auth"})
    return config
def _load_config(config_filepaths):
    """Load config files with inheritance.

    Load each config file and update the result dictionary.
    """
    # Base Settings
    config = {}
    config['MODULE_ROOT_PATH'] = os.path.dirname(os.path.dirname(__file__))

    for fp in config_filepaths:
        with open(fp) as f:
            config.update(yaml.safe_load(f))

    _update_relpaths(config, '_PATH')
    _update_relpaths(config, '_FILEPATH')

    return config
@contextmanager
def new_config(new_config):
    """
    Temporarily change configuration dictionary.
    """
    from .config import defaults
    config = dask.config.config
    orig_config = config.copy()
    try:
        config.clear()
        config.update(defaults.copy())
        dask.config.update(config, new_config)
        initialize_logging(config)
        yield
    finally:
        config.clear()
        config.update(orig_config)
        initialize_logging(config)
def readXMLConfig(filename):
    config = {}
    ctx, root = resource_helper.getRoot(filename, safe=False)
    if root is None:
        return config
    sectionCtx, section = resource_helper.getSubSection(ctx, root, 'common', safe=False)
    if section is not None:
        config.update(resource_helper.readDict(sectionCtx, section).value)
    for key in ('filters', 'formatters', 'handlers', 'loggers'):
        config[key] = _readConfigItem(key, ctx, root)
    resource_helper.purgeResource(filename)
    return config
def main(args=None):
    args = parse_args(args)
    main_proc = rpc_server.MainProcess()

    config = {}

    # load default configuration from file
    for path in (args.config_file, 'config.yaml'):
        if path and os.path.exists(path):
            config = read_config(path)
            break

    process_config(config)

    # allow overwriting any configuration setting via env vars
    for k, v in os.environ.items():
        if k.startswith('WORKER_'):
            config[k.replace("WORKER_", "").replace("_", ".").lower()] = v

    # make zmon worker compatible with old redis config vars
    if 'redis.host' in config:
        port = config.get('redis.port', 6379)
        config.update({"redis.servers": '{}:{}'.format(config["redis.host"], port)})

    # save config in our settings module
    settings.set_workers_log_level(config.get('loglevel', 'INFO'))
    settings.set_external_config(config)
    settings.set_rpc_server_port(config.get('server.port'))

    logging.config.dictConfig(settings.RPC_SERVER_CONF['LOGGING'])

    # start the process controller
    main_proc.start_proc_control()

    # start some processes per queue according to the config
    queues = config['zmon.queues']['local']
    for qn in queues.split(','):
        queue, N = (qn.rsplit('/', 1) + [DEFAULT_NUM_PROC])[:2]
        main_proc.proc_control.spawn_many(
            int(N), kwargs={"queue": queue, "flow": "simple_queue_processor"})

    if not args.no_rpc:
        main_proc.start_rpc_server()

    return main_proc
def init():
    config = {'logging': {'level': 'DEBUG'}}
    MULTINET_SERVER_CONFIG_DIR = os.environ.get(
        'MULTINET_SERVER_CONFIG_DIR',
        os.path.join('/', 'etc', 'gremlin-server'))
    default_config = os.path.join(MULTINET_SERVER_CONFIG_DIR, 'config.yml')
    default_logging_config = os.path.join(MULTINET_SERVER_CONFIG_DIR, 'logging.conf')
    conf_file = (os.environ.get('MULTINET_SERVER_CONF') or default_config)
    if os.path.exists(conf_file):
        with open(conf_file) as stream:
            config.update(yaml.safe_load(stream))
    logging.basicConfig(stream=sys.stdout,
                        level=logging.getLevelName(config['logging']['level']))
def set_global_logging(logging_file: str = None,
                       logging_level: int = logging.INFO,
                       logging_config_file: Union[str, None] = None) -> None:
    import logging.config

    if not logging_config_file:
        config = dict(
            level=logging_level,
            format="%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s"
        )
        if logging_file:
            config.update(filename=logging_file)
        logging.basicConfig(**config)
    else:
        if logging_config_file == 'default':
            logging_config_file = os.path.join(config_dir, 'logging.yaml')
        with open(logging_config_file, 'r') as f:
            config = yaml.safe_load(f)
        logging.config.dictConfig(config)
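# Hypothetical calls to the helper above (argument values are made up for
# illustration; `config_dir` is a module-level path assumed by the snippet):
# set_global_logging()                               # console logging at INFO
# set_global_logging(logging_file="run.log")         # also write to run.log
# set_global_logging(logging_config_file="default")  # load config_dir/logging.yaml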
def init_from_ini(ini_file, games=None, overrides=None):
    import logging
    import logging.config
    logging.config.fileConfig(ini_file)

    # Reset log after logging has been configured
    global log
    log = logging.getLogger(__name__)

    full_config = ConfigParser.SafeConfigParser()
    full_config.readfp(open(ini_file))
    config = dict(full_config.items('DEFAULT'))
    if overrides:
        config.update(overrides)
    init(config, games)
    return (full_config, config)
def cfg(self):
    """Load the application configuration."""
    config = LStruct(self.defaults)
    module = config['CONFIG'] = os.environ.get(
        CONFIGURATION_ENVIRON_VARIABLE, config['CONFIG'])

    if module:
        try:
            module = import_module(module)
            config.update({
                name: getattr(module, name) for name in dir(module)
                if name == name.upper() and not name.startswith('_')
            })
        except ImportError:
            config.CONFIG = None
            self.register_on_start(
                lambda app: app.logger.warning(
                    "The configuration hasn't been found: %s" % module))

    return config
def cfg(self):
    """Load the application configuration.

    This method loads configuration from a python module.
    """
    config = utils.LStruct(self.defaults)
    module = config['CONFIG'] = os.environ.get("CONFIG", config['CONFIG'])

    if module:
        try:
            module = import_module(module)
            config.update({
                name: getattr(module, name) for name in dir(module)
                if name == name.upper() and not name.startswith('_')
            })
        except ImportError as exc:
            config.CONFIG = None
            print("Error importing %s: %s" % (module, exc))

    return config
def cfg(self):
    """Load the application configuration.

    This method loads configuration from a python module.
    """
    config = LStruct(self.defaults)
    module = config['CONFIG'] = os.environ.get(
        CONFIGURATION_ENVIRON_VARIABLE, config['CONFIG'])

    if module:
        try:
            module = import_module(module)
            config.update({
                name: getattr(module, name) for name in dir(module)
                if name == name.upper() and not name.startswith('_')
            })
        except ImportError as exc:
            config.CONFIG = None
            message = "Error importing %s: %s" % (module, exc)
            self.register_on_start(lambda app: app.logger.error(message))

    return config
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from models import Material, Blueprint, Ingredient, Engineer
from utils import DotDict
from sqlalchemy import create_engine, distinct
from sqlalchemy.orm import sessionmaker, joinedload, subqueryload
import click
import yaml
import logging
import logging.config
import json

config = DotDict()
with open("config.yml", "r") as fp:
    config.update(yaml.safe_load(fp))

logging.config.dictConfig(config["logging"])
log = logging.getLogger(__name__)

engine = create_engine(config["db.url"], echo=config["db.echo"])
Session = sessionmaker(bind=engine)


@click.command()
@click.argument("outfile")
def json_export(outfile):
    db = Session()
    data = dict()
    data["materials"] = [
        it.to_dict(["locations"])
def raster2pyramid(input_file, output_dir, options):
    """Creates a tile pyramid out of an input raster dataset."""
    pyramid_type = options["pyramid_type"]
    scale_method = options["scale_method"]
    output_format = options["output_format"]
    resampling = options["resampling"]
    zoom = options["zoom"]
    bounds = options["bounds"]
    overwrite = options["overwrite"]

    # Prepare process parameters
    minzoom, maxzoom = _get_zoom(zoom, input_file, pyramid_type)
    process_file = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "tilify.py"
    )

    with rasterio.open(input_file, "r") as input_raster:
        output_bands = input_raster.count
        input_dtype = input_raster.dtypes[0]
        output_dtype = input_raster.dtypes[0]
        nodataval = input_raster.nodatavals[0]
        if not nodataval:
            nodataval = 0
        if output_format == "PNG":
            if output_bands > 3:
                output_bands = 3
            output_dtype = 'uint8'
        scales_minmax = ()
        if scale_method == "dtype_scale":
            for index in range(1, output_bands + 1):
                scales_minmax += (DTYPE_RANGES[input_dtype], )
        elif scale_method == "minmax_scale":
            for index in range(1, output_bands + 1):
                band = input_raster.read(index)
                scales_minmax += ((band.min(), band.max()), )
        elif scale_method == "crop":
            for index in range(1, output_bands + 1):
                scales_minmax += ((0, 255), )
        if input_dtype == "uint8":
            scale_method = None
            scales_minmax = ()
            for index in range(1, output_bands + 1):
                scales_minmax += ((None, None), )

    # Create configuration
    config = {}
    config.update(
        process_file=process_file,
        output={
            "path": output_dir,
            "format": output_format,
            "type": pyramid_type,
            "bands": output_bands,
            "dtype": output_dtype
        },
        scale_method=scale_method,
        scales_minmax=scales_minmax,
        input_files={"raster": input_file},
        config_dir=os.getcwd(),
        process_minzoom=minzoom,
        process_maxzoom=maxzoom,
        nodataval=nodataval,
        resampling=resampling,
        bounds=bounds,
        pixelbuffer=5,
        baselevel={"zoom": maxzoom, "resampling": resampling}
    )

    LOGGER.info("preparing process ...")

    try:
        mapchete = Mapchete(
            MapcheteConfig(
                config,
                zoom=zoom,
                bounds=bounds
            )
        )
    except PyCompileError as error:
        print(error)
        return
    except Exception:
        raise

    # Prepare output directory and logging
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    logging.config.dictConfig(get_log_config(mapchete))

    for zoom in reversed(range(minzoom, maxzoom + 1)):
        # Determine work tiles and run
        work_tiles = mapchete.get_work_tiles(zoom)
        func = partial(_worker, mapchete=mapchete, overwrite=overwrite)
        pool = Pool()
        try:
            pool.map_async(func, work_tiles)
            pool.close()
        except KeyboardInterrupt:
            LOGGER.info("Caught KeyboardInterrupt, terminating workers")
            pool.terminate()
            break
        except Exception:
            raise
        finally:
            pool.close()
            pool.join()
#!/usr/bin/env python
# This file is part of HemeLB and is Copyright (C)
# the HemeLB team and/or their institutions, as detailed in the
# file AUTHORS. This software is provided under the terms of the
# license in the file LICENSE.

import yaml
import os
import logging
import logging.config

localroot = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# Load and invoke the default non-machine-specific config YAML dictionaries.
with open(os.path.join(localroot, 'Tools', 'analysis', 'config_defaults.yml')) as defaults_file:
    config = yaml.safe_load(defaults_file)

with open(os.path.join(localroot, 'Tools', 'analysis', 'config.yml')) as user_file:
    config.update(yaml.safe_load(user_file))

with open(os.path.join(localroot, 'Tools', 'analysis', 'logging.yml')) as logging_settings_file:
    dc = yaml.safe_load(logging_settings_file)

dc['handlers']['parsing']['filename'] = os.path.expanduser(
    os.path.join(config['reports_path'], 'parsing.log'))
logging.config.dictConfig(dc)
def setup_logging(**kwargs):
    config = DEFAULT_BOT_SETTINGS["logging"]
    config.update(kwargs)
    logging.config.dictConfig(config)
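# For reference, a minimal dict that logging.config.dictConfig accepts; this
# is a sketch of what DEFAULT_BOT_SETTINGS["logging"] might contain, since
# the real value is not shown in the snippet above:
import logging.config

minimal_logging_config = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {"plain": {"format": "%(levelname)s %(name)s: %(message)s"}},
    "handlers": {"console": {"class": "logging.StreamHandler",
                             "formatter": "plain"}},
    "root": {"handlers": ["console"], "level": "INFO"},
}
logging.config.dictConfig(minimal_logging_config)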
def test_config_update_context_converts_list():
    import math

    config = konch.Config()
    config.update({"context": [math]})
    assert config["context"] == {"math": math}
def main():
    '''Main(). Commandline parsing and stalker startup.'''
    parser = argparse.ArgumentParser()

    parser.add_argument("-p", "--posttroll_port", dest="posttroll_port",
                        help="Local port where messages are published")
    parser.add_argument("-t", "--topic", dest="topic",
                        help="Topic of the sent messages")
    parser.add_argument("-c", "--configuration_file",
                        help="Name of the config.ini configuration file")
    parser.add_argument("-C", "--config_item",
                        help="Name of the configuration item to use")
    parser.add_argument("-e", "--event_names",
                        help="Name of the pyinotify events to monitor")
    parser.add_argument("-f", "--filepattern",
                        help="Filepath pattern used to parse "
                             "satellite/orbit/date/etc information")
    parser.add_argument("-i", "--instrument",
                        help="Instrument name in the satellite")

    if len(sys.argv) <= 1:
        parser.print_help()
        sys.exit()
    else:
        args = parser.parse_args()

    # Parse commandline arguments. If args are given, they override
    # the configuration file.
    args_dict = vars(args)
    args_dict = {k: args_dict[k] for k in args_dict if args_dict[k] is not None}

    config = {}

    if args.configuration_file is not None:
        config_fname = args.configuration_file
        if "template" in config_fname:
            print("Template file given as trollstalker logging config, aborting!")
            sys.exit()
        cparser = ConfigParser()
        cparser.read(config_fname)
        config = dict(cparser.items(args.config_item, vars=args_dict))

    config.update(args_dict)

    config.update({k: config[k].split(",")
                   for k in config if "," in config[k]})

    config.setdefault("posttroll_port", "0")

    try:
        log_config = config["stalker_log_config"]
    except KeyError:
        try:
            loglevel = getattr(logging, config.get("loglevel", "DEBUG"))
            if loglevel == "":
                raise AttributeError
        except AttributeError:
            loglevel = logging.DEBUG

        LOGGER.setLevel(loglevel)

        rootlogger = logging.getLogger("")
        rootlogger.setLevel(loglevel)
        strhndl = logging.StreamHandler()
        strhndl.setLevel(loglevel)
        log_format = "[%(asctime)s %(levelname)-8s %(name)s] %(message)s"
        formatter = logging.Formatter(log_format)
        strhndl.setFormatter(formatter)
        rootlogger.addHandler(strhndl)
    else:
        logging.config.fileConfig(log_config)

    LOGGER.debug("Logger started")

    # Start watching for new files
    notifier = FilePublisher(config)
    notifier.start()

    try:
        while True:
            time.sleep(6000000)
    except KeyboardInterrupt:
        LOGGER.info("Interrupting TrollStalker")
    finally:
        notifier.stop()
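# ConfigParser.items(section, vars=...) overlays the `vars` mapping on top of
# the section's options, which is how the CLI arguments override the file in
# the snippet above. A small self-contained illustration (the section name
# and values here are made up):
from configparser import ConfigParser

cp = ConfigParser()
cp.read_string("[noaa_hrpt]\ntopic = /HRPT/L0\nposttroll_port = 9000\n")
merged = dict(cp.items("noaa_hrpt", vars={"posttroll_port": "9001"}))
assert merged["posttroll_port"] == "9001"  # vars wins over the file
assert merged["topic"] == "/HRPT/L0"       # file value kept otherwise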
__copyright__ = "Copyright (C) 2014-2017 Ivan Kravets"

config = dict(
    SQLALCHEMY_DATABASE_URI=None,
    GITHUB_LOGIN=None,
    GITHUB_PASSWORD=None,
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024 * 1024 * 150,  # 150 Mb

    # Fuzzy search will not be applied to words shorter than the value below
    SOLR_FUZZY_MIN_WORD_LENGTH=3,

    LOGGING=dict(version=1)
)

assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
    config.update(json.load(f))

# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])

# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
try:
    from time import tzset
    tzset()
except ImportError:
    pass
import os
import jsmin
import json
import logging.config

current_path = os.path.dirname(__file__)
local_config_path = os.path.join(current_path, 'local.config.json')
config_path = os.path.join(current_path, 'config.json')

config = json.loads(jsmin.jsmin(open(config_path).read()))
if os.path.exists(local_config_path):
    config.update(json.loads(jsmin.jsmin(open(local_config_path).read())))

config["current_dir"] = os.path.dirname(os.path.abspath(__file__))

if "template_dir" not in config:
    config["template_dir"] = os.path.join(config["current_dir"], "templates")
if "template_path" not in config:
    config["template_path"] = os.path.join(config["current_dir"], "templates")
if "staticfiles_dir" not in config:
    config["staticfiles_dir"] = os.path.join(config["current_dir"], "images")
if "static_path" not in config:
    config["static_path"] = config["current_dir"]
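# Running the JSON through jsmin first lets the config files carry //-style
# comments, which plain json.loads would reject. A quick illustration:
import json
import jsmin  # third-party: pip install jsmin

raw = '{ "debug": true, // enable verbose output\n "port": 8080 }'
assert json.loads(jsmin.jsmin(raw)) == {"debug": True, "port": 8080}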
def test_config_shallow_merges_context():
    config = konch.Config()
    config.update({"context": {"foo": 42}, "banner": "bar"})
    config.update({"context": {"baz": 24}, "banner": "qux"})
    assert config["context"] == {"foo": 42, "baz": 24}
    assert config["banner"] == "qux"

    config = konch.Config()
    config.update({"context": {"foo": 42}})
    config.update({"context": {"foo": 24}})
    assert config["context"] == {"foo": 24}

    config = konch.Config()
    config.update({"context": {"foo": {"inner": 42}}})
    config.update({"context": {"foo": {"inner2": 24}}})
    assert config["context"] == {"foo": {"inner2": 24}}

    config = konch.Config()

    def bar():
        pass

    config.update({"context": {"foo": 42}, "banner": "bar"})
    config.update({"context": [bar], "banner": "bar"})
def main(args=None):
    args = parse_args(args)
    main_proc = rpc_server.MainProcess()

    config = {}

    # load default configuration from file
    for path in (args.config_file, 'config.yaml'):
        if path and os.path.exists(path):
            config = read_config(path)
            break

    process_config(config)

    # allow overwriting any configuration setting via env vars
    for k, v in os.environ.items():
        if k.startswith('WORKER_'):
            config[k.replace("WORKER_", "").replace("_", ".").lower()] = v

    # make zmon worker compatible with old redis config vars
    if 'redis.host' in config:
        port = config.get('redis.port', 6379)
        config.update({"redis.servers": '{}:{}'.format(config["redis.host"], port)})

    # save config in our settings module
    settings.set_workers_log_level(config.get('loglevel', 'INFO'))
    settings.set_external_config(config)
    settings.set_rpc_server_port(config.get('server.port'))

    logging.config.dictConfig(settings.RPC_SERVER_CONF['LOGGING'])
    logger = logging.getLogger(__name__)

    # start the process controller
    main_proc.start_proc_control()

    # start worker processes per queue according to the config
    queues = config['zmon.queues']
    for qn in queues.split(','):
        queue, N = (qn.rsplit('/', 1) + [DEFAULT_NUM_PROC])[:2]
        main_proc.proc_control.spawn_many(
            int(N),
            kwargs={"queue": queue, "flow": "simple_queue_processor"},
            flags=MONITOR_RESTART | MONITOR_KILL_REQ | MONITOR_PING)

    # start web server process under supervision
    main_proc.proc_control.spawn_process(
        target=start_web,
        kwargs=dict(
            listen_on=config.get('webserver.listen_on', '0.0.0.0'),
            port=int(config.get('webserver.port', '8080')),
            log_conf=None,
            threaded=True,
            rpc_url='http://{host}:{port}{path}'.format(
                host='localhost',
                port=config.get('server.port'),
                path=settings.RPC_SERVER_CONF['RPC_PATH']),
        ),
        flags=MONITOR_RESTART,  # web server will be restarted if it dies
    )

    if not args.no_rpc:
        try:
            main_proc.start_rpc_server()
        except (KeyboardInterrupt, SystemExit):
            logger.info('RPC server stopped. Exiting main')

    return main_proc