def override_parameters(config_filename, config_label):
    """Override default parameters from a developer .ini file.

    Reads *config_filename*; for each entry of ``default_parameters``
    whose (section, option) appears in the file, coerces the raw value
    to the type of the parameter's default and stores it in the
    "config" service.  If the file cannot be read, only a debug message
    is emitted and nothing changes.
    """
    config = pm.getService("config")
    parser = ConfigParser.SafeConfigParser()
    result = parser.read(config_filename)
    if len(result) != 1:
        # file was not read, warn and return
        chapi_debug('PARAMETERS', 'Unable to read developer %s file %s' % (config_label, config_filename))
    else:
        chapi_warn('PARAMETERS', "Over-riding configs using %s config file %s" % (config_label, config_filename))
        # FIXME: Only allow log settings to be changed?
        for param in default_parameters:
            pname = param[NAME_KEY]
            (section, option) = param_to_secopt(pname)
            if parser.has_option(section, option):
                # Coerce to the type of the declared default, else str.
                if VALUE_KEY in param:
                    value_type = type(param[VALUE_KEY])
                else:
                    value_type = str
                value = get_typed_value(parser, section, option, value_type)
                if value is not None:
                    # If a value was extracted, set it
                    msg = 'Setting parameter %s to %s from %s'
                    chapi_info('PARAMETERS', msg % (pname, value, config_filename))
                    config.set(pname, value)
def main():
    """Entry point: assemble configuration, handle informational flags,
    check for newer releases, configure logging and run the bot."""
    config = ConfigManager()
    config.set_many(DEFAULT_CONFIG)
    # CLI/schema config is applied before AND after the file so that
    # explicitly-passed options win over file contents.
    _config = ConfigManager(schema=schema, defaults=defaults)
    config.set_many(_config.as_dict())
    config.set_many(
        ConfigManager(schema=schema, path=_config.get('config.file')).as_dict())
    config.set_many(_config.as_dict())
    config.commit()
    if config.get('dump_configuration'):
        # Print the effective config (minus config42 internals) and quit.
        conf = config.as_dict()
        conf.pop('config42')
        print(yaml.dump(conf))
        exit(0)
    if config.get('show_version'):
        print("Installed version {}".format(instabot_py.__version__))
        exit(0)
    if not config.get('ignore_updates'):
        last_version = get_last_version()
        if last_version and last_version != instabot_py.__version__:
            print(
                "Newer version available: {}, The current version: {}".format(
                    last_version, instabot_py.__version__))
            print(
                "To update, please type \n python3 -m pip install instabot-py --upgrade --no-cache-dir "
            )
            print("")
            print(
                " > You can ignore warning, run the instabot with --ignore-updates flag"
            )
            exit(0)
    if config.get('verbosity'):
        verbosity = int(config.get('verbosity'))
        # NOTE(review): a verbosity value below 1 leaves ``level`` unbound
        # and raises NameError on the config.set below — confirm intended.
        if verbosity == 1:
            level = logging.INFO
        elif verbosity > 1:
            level = logging.DEBUG
        config.set("logging.root.level", level)
    logging.config.dictConfig(config.get("logging"))
    try:
        bot = InstaBot(config=config)
    except CredsMissing:
        # Friendly guidance instead of a traceback when credentials are absent.
        print(
            "You didn't provide your Instagram login & password or you didn't specify the configuration file"
        )
        print("Try again :")
        print("")
        print(" instabot-py --login YOUR_LOGIN --password YOUR_PASSWORD")
        print(" instabot-py -c your-config.yml")
        print(
            "You can export and modify the default configuration by typing the command below"
        )
        print(" instabot-py --dump")
        exit(1)
    bot.mainloop()
def main():
    """Entry point: build config, handle dump/version flags, run the bot."""
    config = ConfigManager()
    config.set_many(DEFAULT_CONFIG)
    # CLI/schema config applied before and after the file so CLI wins.
    _config = ConfigManager(schema=schema, defaults=defaults)
    config.set_many(_config.as_dict())
    config.set_many(ConfigManager(path=_config.get('config.file')).as_dict())
    config.set_many(_config.as_dict())
    config.commit()
    if config.get('dump_configuration'):
        # Print the effective configuration (minus config42 internals).
        conf = config.as_dict()
        conf.pop('config42')
        print(yaml.dump(conf))
        exit(0)
    if config.get('show_version'):
        print(instabot_py.__version__)
        exit(0)
    if config.get('verbosity'):
        verbosity = int(config.get('verbosity'))
        # NOTE(review): verbosity < 1 leaves ``level`` unbound (NameError).
        if verbosity == 1:
            level = logging.INFO
        elif verbosity > 1:
            level = logging.DEBUG
        config.set("logging.root.level", level)
    logging.config.dictConfig(config.get("logging"))
    bot = InstaBot(config=config)
    bot.mainloop()
def dict2config(jdict, section):
    """Build a ConfigParser holding *jdict*'s entries under *section*.

    Every value is stringified, since ConfigParser stores strings.
    """
    parser = ConfigParser.ConfigParser()
    if not parser.has_section(section):
        parser.add_section(section)
    for key, value in jdict.iteritems():
        parser.set(section, key, str(value))
    return parser
def dict2config(jdict, section):
    """Build a ConfigParser populated with *jdict* under *section*.

    Values are stringified because ConfigParser only stores strings.
    Uses the built-in ``dict.items()`` instead of the third-party
    ``viewitems`` helper; iteration behaviour is identical.
    """
    config = ConfigParser()
    if not config.has_section(section):
        config.add_section(section)
    for (k, v) in jdict.items():
        config.set(section, k, str(v))
    return config
def dict2config(jdict, section):
    """Return a ConfigParser holding *jdict*'s entries under *section*."""
    config = ConfigParser.ConfigParser()
    if not config.has_section(section):
        config.add_section(section)
    # NOTE: iteritems() is Python 2 only; values are stringified because
    # ConfigParser stores strings.
    for k,v in jdict.iteritems():
        config.set(section, k, str(v))
    return config
def get_dms_mapping():
    """Populate the DMS_IDS config section with id -> name mappings.

    Queries the OTC DMS endpoint for queues and, per queue, its consumer
    groups; stores "<queue>/<group>" names keyed by group id, then saves
    the config file.  On a 401 the token is refreshed and the lookup
    retries itself recursively; other errors are only logged.
    """
    r = requests.get(config.get('OTC_ENDPOINTS', 'dms_names'),
                     headers={'X-Auth-Token': get_token()})
    if r.status_code == 200:
        for queue in json.loads(r.text)["queues"]:
            config.set('DMS_IDS', queue['id'], queue['name'])
            logging.debug("Created name entry for DMS '%s'='%s' successfully" %
                          (queue['id'], queue['name']))
            # Per-queue consumer groups; URL template carries a <queue_id> slot.
            consumer_group_url = config.get(
                'OTC_ENDPOINTS', 'dms_consumer_names').replace(
                    "<queue_id>", queue['id'])
            r = requests.get(consumer_group_url,
                             headers={'X-Auth-Token': get_token()})
            if r.status_code == 200:
                for group in json.loads(r.text)["groups"]:
                    group_name = '%s/%s' % (queue['name'], group['name'])
                    config.set('DMS_IDS', group['id'], group_name)
                    logging.debug(
                        "Created name entry for DMS Group '%s'='%s' successfully" %
                        (group['id'], group_name))
        save_config_file()
    elif r.status_code == 401:
        logging.warn(
            "Token seems to be expired, requesting a new one and retrying")
        request_token()
        # NOTE(review): unbounded recursion if the endpoint keeps returning 401.
        get_dms_mapping()
    else:
        logging.error("Could not gather DMS names, got result code '%s'" %
                      r.status_code)
def configureTelegram(config):
    """Interactively configure and optionally verify Telegram reporting.

    Stores the bot token, chat id and a "telegram_created" timestamp via
    the config object, then makes a probe API call to check that the
    credentials actually work.
    """
    verify = False
    last_updated = getLastUpdated(config, "telegram_created")
    choice = config.input_default_yes(
        "Configure Telegram? (Optional, used for reporting. Last updated: {})".
        format(last_updated))
    if choice:
        config.get_input("telegram_bot_token", "Enter your Telegram Bot token")
        config.get_input("telegram_chat_id", "Enter your Telegram chat ID")
        config.set("telegram_created", int(time.time()))
        config.write_config()
        verify = True
    if not verify:
        # Nothing was (re)configured — offer to verify existing credentials.
        verify = config.input_default_yes("Verify Telegram credentials?")
    if not verify:
        return
    bot = telegram.Bot(config.get("telegram_bot_token"))
    try:
        logger.info("Testing access to Telegram...")
        # Probe call; the result is unused.  An IndexError just means the
        # bot has no pending updates, which still proves access works.
        update_id = bot.get_updates()[0].update_id
    except IndexError:
        pass
    logger.info("We can access Telegram!")
def luigid(argv=sys.argv[1:]):
    """Run the central luigi scheduler server.

    NOTE(review): ``argv=sys.argv[1:]`` is evaluated once at import time
    (early-bound default) — confirm callers never rely on sys.argv being
    modified after import.
    """
    import luigi.server
    import luigi.process
    import luigi.configuration
    parser = argparse.ArgumentParser(description=u'Central luigi server')
    parser.add_argument(u'--background', help=u'Run in background mode', action='store_true')
    parser.add_argument(u'--pidfile', help=u'Write pidfile')
    parser.add_argument(u'--logdir', help=u'log directory')
    parser.add_argument(u'--state-path', help=u'Pickled state file')
    parser.add_argument(u'--address', help=u'Listening interface')
    parser.add_argument(u'--unix-socket', help=u'Unix socket path')
    parser.add_argument(u'--port', default=8082, help=u'Listening port')
    opts = parser.parse_args(argv)
    if opts.state_path:
        config = luigi.configuration.get_config()
        config.set('scheduler', 'state_path', opts.state_path)
    if opts.background:
        # daemonize sets up logging to spooled log files
        logging.getLogger().setLevel(logging.INFO)
        luigi.process.daemonize(luigi.server.run, api_port=opts.port,
                                address=opts.address, pidfile=opts.pidfile,
                                logdir=opts.logdir, unix_socket=opts.unix_socket)
    else:
        if opts.logdir:
            logging.basicConfig(level=logging.INFO,
                                format=luigi.process.get_log_format(),
                                filename=os.path.join(opts.logdir, "luigi-server.log"))
        else:
            # No logdir: fall back to the logging setup named in luigi.cfg.
            config = luigi.configuration.get_config()
            logging_conf = None
            if not config.getboolean('core', 'no_configure_logging', False):
                logging_conf = config.get('core', 'logging_conf_file', None)
                if logging_conf is not None and not os.path.exists(
                        logging_conf):
                    raise Exception(
                        "Error: Unable to locate specified logging configuration file!"
                    )
            if logging_conf is not None:
                print(
                    "Configuring logging from file: {0}".format(logging_conf))
                logging.config.fileConfig(logging_conf)
            else:
                print(
                    "Defaulting to basic logging; consider specifying logging_conf_file in luigi.cfg."
                )
                logging.basicConfig(level=logging.INFO,
                                    format=luigi.process.get_log_format())
        luigi.server.run(api_port=opts.port, address=opts.address,
                         unix_socket=opts.unix_socket)
def del_op(self,nick,chan,msg):
    """Revoke bot-operator status from *msg* (operators only)."""
    if not authed(self,nick,chan):
        return
    if msg not in AUTHD:
        helpers.msg(self.client,chan,'%s: Um...%s is not a bot operator...'%(nick,msg))
        return
    AUTHD.remove(msg)
    config.set('IRC','authed_users',','.join(AUTHD))
    helpers.msg(self.client,chan,'%s: OK, I removed %s as a bot operator'%(nick,msg))
def deannounce(self,nick,chan,msg):
    """Stop announcing status changes in channel *msg* (operators only)."""
    if not authed(self,nick,chan):
        return
    if msg in TOPIC_CHANS:
        TOPIC_CHANS.remove(msg)
        config.set('IRC','announce',','.join(TOPIC_CHANS))
        reply = '%s: OK, I will stop announcing status changes in %s'%(nick,msg)
    else:
        reply = '%s: Um...I am not announcing status changes in %s...'%(nick,msg)
    helpers.msg(self.client,chan,reply)
def announce(self,nick,chan,msg):
    """Start announcing status changes in channel *msg* (operators only)."""
    if not authed(self,nick,chan):
        return
    if msg in TOPIC_CHANS:
        reply = '%s: Um...I am *already* announcing status changes in %s...'%(nick,msg)
    else:
        TOPIC_CHANS.append(msg)
        config.set('IRC','announce',','.join(TOPIC_CHANS))
        reply = '%s: OK, I will announce status changes in %s'%(nick,msg)
    helpers.msg(self.client,chan,reply)
def part(self,nick,chan,msg):
    """Leave channel *msg* and drop it from the persisted channel list.

    Only honoured for authenticated operators.
    """
    if authed(self,nick,chan):
        if msg in CHANS:
            CHANS.remove(msg)
            config.set('IRC','channels',','.join(CHANS))
            # NOTE(review): uses module-level ``cli`` here but ``self.client``
            # for messaging — confirm both refer to the same connection.
            helpers.part(cli, msg)
            helpers.msg(self.client,chan,'%s: OK, I parted %s'%(nick,msg))
        else:
            helpers.msg(self.client,chan,'%s: Um...I am not in %s...'%(nick,msg))
def configure_logging(config):
    """Apply the 'verbosity' setting to the root logger, then init logging.

    verbosity 1 maps to logging.INFO, anything greater to logging.DEBUG.
    Fix over the original: a verbosity value below 1 (e.g. "0") used to
    leave ``level`` unbound and raise NameError; such values are now
    simply ignored.
    """
    if config.get('verbosity'):
        verbosity = int(config.get('verbosity'))
        level = None
        if verbosity == 1:
            level = logging.INFO
        elif verbosity > 1:
            level = logging.DEBUG
        if level is not None:
            config.set("logging.root.level", level)
    logging.config.dictConfig(config.get("logging"))
def getConfiguration(cfgfile=None, config_required=None):
    '''read an ini configuration file and return a dictionary of key/value pairs
    update configuration file if missing any sections

    accepts:
        cfgfile - path to configuration file
        config_required - nested dictionary in the following format:
            {'Section1': {'key1': 'value1', 'key2': 'value2'},
             'Section 2': {'key1': 'value1'}}
            defaults to {'Main': {'key1': 'value1', 'key2': 'value2'}}

    raises:
        ValueError - if no configuration file is specified
    '''
    if not cfgfile:
        raise ValueError('no configuration file specified')
    # Fix over the original: the default was a mutable dict literal in the
    # signature (shared across calls); build it per call instead.
    if config_required is None:
        config_required = {'Main': {'key1': 'value1', 'key2': 'value2'}}
    # required configuraiton options
    # Section: {'option': 'default value'}
    logger = logging.getLogger(__name__)
    logger.debug('getting configuration from file: {}'.format(cfgfile))
    cfgpath = os.path.dirname(cfgfile)
    config = configuration.get_config(cfgfile)
    update_config = False
    logger.debug('checking sections')
    for section, values in list(config_required.items()):
        if not config.has_section(section):
            logger.warning('section: {} not found in {}'.format(section, cfgfile))
            logger.debug('adding section {}'.format(section))
            config.add_section(section)
            update_config = True
        for option, value in list(values.items()):
            if not config.has_option(section, option):
                logger.warning('option: {} not found in {}'.format(option, cfgfile))
                logger.debug('adding option {}: {}'.format(option, value))
                config.set(section, option, value)
                update_config = True
    # Write the file back only if anything was added.
    if update_config:
        try:
            logger.debug('updating configuration file at: {}'.format(cfgfile))
            configuration.create_config(cfgfile, config)
        except Exception as e:
            logger.error(e)
    return(config)
def get_baud():
    """Detect the pixhawk's serial baud rate by listening for a heartbeat.

    Reads the serial device / flow-control / expected-baud settings from
    sololink.conf, then tries each candidate baud until a MAVLink
    HEARTBEAT arrives.  Updates the config file if the detected baud
    differs from the stored one.  Returns the working baud, or None.
    """
    config = ConfigParser.SafeConfigParser()
    # preserve option-name case (the default optionxform lowercases)
    config.optionxform = str
    config.read(sololink_conf)
    serial_dev = config_get(config, config_dev_name)
    if serial_dev is None:
        logger.error("reading %s from %s", config_dev_name, sololink_conf)
        return None
    logger.debug("%s = %s", config_dev_name, serial_dev)
    serial_flow = config_getbool(config, config_flow_name, True)
    logger.debug("%s = %s", config_flow_name, serial_flow)
    expected_baud = config_getint(config, config_baud_name)
    if expected_baud is None:
        logger.warning("no %s in %s (will search)", config_baud_name, sololink_conf)
    logger.debug("%s = %s", config_baud_name, str(expected_baud))
    baudlist = get_baudlist(config, expected_baud)
    logger.debug("baud list %s", str(baudlist))
    logger.info("checking baud...")
    for baud in baudlist:
        logger.debug("baud %d...", baud)
        m = mavutil.mavlink_connection(serial_dev, baud=baud)
        m.set_rtscts(serial_flow)
        # Allow for missing one, then getting the next one. The expectation is
        # that we'll almost always have the right baud and get the first
        # heartbeat.
        start_us = clock.gettime_us(clock.CLOCK_MONOTONIC)
        hb = m.recv_match(type='HEARTBEAT', blocking=True, timeout=2.5)
        end_us = clock.gettime_us(clock.CLOCK_MONOTONIC)
        m.close()
        if hb is not None:
            # pixhawk is at baud
            # update config file if necessary
            logger.debug("heartbeat received in %0.3f sec",
                         (end_us - start_us) / 1000000.0)
            logger.info("found at %d", baud)
            if baud != expected_baud:
                logger.info("updating %s %s from %s to %d", sololink_conf,
                            config_baud_name, str(expected_baud), baud)
                config.set("solo", config_baud_name, str(baud))
                config_write(config)
            return baud
        logger.info("not at %d", baud)
    logger.error("not detected at any baud")
    return None
def log_location_update():
    """Point the fileHandler in logger.ini at the report directory's log."""
    ini_path = f"{Common.config_path}/logger.ini"
    parser = configparser.ConfigParser()
    parser.read(ini_path)
    parser.set(
        "handler_fileHandler",
        "args",
        f"('{Common.report_path}/Automation.log', 'a')",
    )
    with open(ini_path, "w") as handle:
        parser.write(handle)
def add_op(self,nick,chan,msg):
    """Grant bot-operator status to *msg* (operators only)."""
    if not authed(self,nick,chan):
        return
    if msg in AUTHD:
        if msg == nick:
            reply = '%s: Um...you already are an operator...'%(nick)
        else:
            reply = '%s: Um...%s is *already* a bot operator...'%(nick,msg)
    else:
        AUTHD.append(msg)
        config.set('IRC','authed_users',','.join(AUTHD))
        reply = '%s: OK, I added %s as a bot operator'%(nick,msg)
    helpers.msg(self.client,chan,reply)
def create_default_configuration(cls, default_profile_url):
    """ensure a default profile is configured."""
    directory = cls._ensure_configuration_directory()
    parser = ConfigParser()
    parser.add_section('default')
    parser.set('default', 'repository', default_profile_url)
    profiles_path = os.path.join(directory, 'profiles')
    with open(profiles_path, 'w') as profiles_file:
        parser.write(profiles_file)
    return cls()
def join(self,nick,chan,msg):
    """Join channel *msg* and persist it in the channel list.

    Only honoured for authenticated operators.
    """
    if authed(self,nick,chan):
        if msg not in CHANS:
            CHANS.append(msg)
            config.set('IRC','channels',','.join(CHANS))
            # NOTE(review): uses module-level ``cli`` here but ``self.client``
            # for messaging — confirm both refer to the same connection.
            helpers.join(cli, msg)
            helpers.msg(self.client,chan,'%s: OK, I joined %s'%(nick,msg))
        elif msg == chan:
            helpers.msg(self.client,chan,'%s: Um...this *is* %s...'%(nick,msg))
        else:
            helpers.msg(self.client,chan,'%s: Um...I am *already* in %s...'%(nick,msg))
def configure_logging(config):
    """Propagate the 'verbosity' setting to every configured logger.

    verbosity 1 maps to logging.INFO, anything greater to logging.DEBUG;
    the chosen level is written onto each entry under "logging.loggers".
    Fixes over the original: ``level`` could be read unbound when
    verbosity < 1 (NameError), and a missing "logging.loggers" mapping
    crashed on ``None.keys()``.
    """
    if config.get('verbosity'):
        verbosity = int(config.get('verbosity'))
        level = None
        if verbosity == 1:
            level = logging.INFO
        elif verbosity > 1:
            level = logging.DEBUG
        if level is not None:
            loggers = config.get("logging.loggers") or {}
            for logger in loggers.keys():
                config.set("logging.loggers.{}.level".format(logger), level)
    logging.config.dictConfig(config.get("logging"))
def request_token():
    """Request a fresh auth token from OTC and persist it in the config.

    Exits the process with status 2 when the request is rejected.
    """
    response = requests.post(config.get('OTC_ENDPOINTS', 'request_token'),
                             json=json.loads(config.get('JSON_REQUEST', 'token')))
    if response.status_code != 201:
        logging.error("Request for token got result code '%s'" % response.status_code)
        exit(2)
    token = response.headers['x-subject-token']
    config.set('OTC_CREDENTIALS', 'token', token)
    save_config_file()
    logging.info("New token generated")
    return token
def main():
    """Command-line entry point for tasker.

    Parses args, pushes presentation settings into the config, builds the
    task library and command shell, loads minion plugins, then dispatches
    on the requested mode (directory / version / interactive / one-shot).
    """
    args = parser.parse_args()
    logging.debug(args)
    if args.verbose:
        logger.setLevel(logging.INFO)
    if args.debug:
        logger.setLevel(logging.DEBUG)
    # Persist presentation options so the library picks them up.
    config.set('Tasker', 'wrap-behavior', args.wrap)
    config.set('Tasker', 'wrap-width', str(args.width))
    config.set('Tasker', 'show-priority-z', str(args.showz))
    config.set('Tasker', 'priority-z-last', str(args.integrate))
    config.set('Tasker', 'theme-name', args.theme)
    tasklib = TaskLib(config)
    tasklib.set_theme(args.theme)
    colorama.init(strip=True, autoreset=True)
    cli = TaskCmd(config=config, lib=tasklib)
    for e_point in pkg_resources.iter_entry_points('tasker_minions'):
        minion = e_point.load()
        cli.add_minion(e_point.name, minion())
        # the main library should have already loaded a library for the
        # plugin, if one exists.
        # the minion should now have a master
        if e_point.name in cli.lib.libraries:
            cli.minions[e_point.name].lib = cli.lib.libraries.get(e_point.name)
    if args.directory:
        if args.command:
            print(tasklib.libraries[args.command].directory)
        else:
            print(config['Files']['tasker-dir'])
        return 0
    if args.version:
        if args.command:
            print(tasklib.libraries[args.command].__version__)
        else:
            print(__version__)
        return 0
    elif args.interact:
        cli.cmdloop()
    elif not args.command:
        # No subcommand: default to listing tasks.
        cli.onecmd('list')
    else:
        # Forward everything from the subcommand onward to the shell.
        cli.onecmd(' '.join(sys.argv[sys.argv.index(args.command):]))
    return 0
def write_config_to_filename(config_dictionary, config_filename):
    """Write a {section: {option: value}} dict as an INI file.

    The file is created exclusively (O_EXCL) with owner-only read/write
    permissions, so an already-existing file raises an error.  Fixes over
    the original: the descriptor is wrapped in text mode ('w') rather
    than binary ('wb') — ConfigParser.write() emits str — and the handle
    is closed via ``with`` even if writing fails.
    """
    config = SafeConfigParser()
    for section in config_dictionary:
        config.add_section(section)
        variables = config_dictionary[section]
        for v in variables:
            config.set(section, v, variables[v])
    mode = stat.S_IRUSR | stat.S_IWUSR
    flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
    fd = os.open(config_filename, flags, mode)
    with os.fdopen(fd, 'w') as f:
        config.write(f)
def get_cache_dir(config, target):
    """Return the gist-sync cache directory, creating the setting if absent.

    A missing section or option falls back to '.gist-sync', which is
    written back into the config object.
    """
    cache_dir = None
    try:
        cache_dir = config.get('gist-sync', 'cache_dir').strip()
    except six.moves.configparser.NoSectionError:
        logger.debug('no section: title: %s', target)
        config.add_section('gist-sync')
    except six.moves.configparser.NoOptionError:
        logger.debug('no option: title: %s', target)
    if cache_dir:
        return cache_dir
    cache_dir = '.gist-sync'
    config.set('gist-sync', 'cache_dir', cache_dir)
    return cache_dir
def write_config(albumNum, trackNum):
    """Persist the current album and track numbers to the CONFIG file.

    Fix over the original: the values are stringified explicitly —
    ConfigParser.set() rejects non-string values on Python 3.
    """
    # Add content to the file
    config = ConfigParser.ConfigParser()
    config.add_section('cdc')
    config.set('cdc', 'album', str(albumNum))
    config.set('cdc', 'track', str(trackNum))
    # Create the configuration file as it doesn't exist yet
    with open(CONFIG, 'w') as file:
        config.write(file)
    logger.info('write config album: {}, track: {}'.format(albumNum, trackNum))
    return
def load_config(defaults, files):
    """Load the defaults file, then layer *files* on top of it.

    If ACCOUNTING_CLIENT_DIR is set in the environment (hack to make it
    possible to find the client dir in buildbot), it overrides
    [accounting] client_dir.
    """
    config = ConfigParser.ConfigParser()
    with open(defaults, 'r') as f:
        config.readfp(f)
    config.read(files)
    override = os.environ.get('ACCOUNTING_CLIENT_DIR')
    if override is not None and config.has_section('accounting'):
        config.set('accounting', 'client_dir', override)
    return config
def updateLogConfig(fileName, loggerName):
    """Register *loggerName* in an existing logging config file.

    Appends the name to the [loggers] keys list and adds a
    logger_<name> section using a console handler at ERROR level.
    Failures are deliberately swallowed (best effort), as in the
    original; the Python 2 ``print`` statement, bare ``except`` and
    binary-mode write have been modernised.
    """
    try:
        config = openLogConfig(fileName)
        config.set('loggers', 'keys',
                   config.get('loggers', 'keys') + ',' + loggerName)
        print('new logger names:', config.get('loggers', 'keys'))
        config.add_section('logger_' + loggerName)
        config.set('logger_' + loggerName, 'handlers', 'consoleHandler')
        config.set('logger_' + loggerName, 'propagate', '0')
        config.set('logger_' + loggerName, 'level', 'ERROR')
        config.set('logger_' + loggerName, 'qualname', loggerName)
        # ConfigParser.write() emits str, so the file must be text mode.
        with open(fileName, 'w') as configfile:
            config.write(configfile)
    except Exception:
        # best effort: keep the original's silent-failure behaviour
        pass
def set_dev_mode():
    """ Setup `development` mode to run the skill """
    # Run on the builtin WSGIRefServer bound to localhost.
    logger.warning(
        "Starting bottle with WSGIRefServer. Do not use in production!")
    for option, value in (('server', 'wsgiref'), ('host', 'localhost')):
        config.set('http', option, value)
    # Swagger UI is optional in development mode.
    try:
        import swagger_ui
    except ModuleNotFoundError:
        logger.warning("Swagger UI not found, starting without...")
def __parseCfgOptions(self):
    """Load config + config.d/* into the registry and wire up logging.

    Optional per-user GUI settings are merged over the parsed config
    before the combined result is fed to logging.config.fileConfig.
    """
    # Is there a configuration available?
    configDir = self.get('core.config')
    configFiles = self.__getCfgFiles(os.path.join(configDir, "config.d"))
    configFiles.insert(0, os.path.join(configDir, "config"))
    config = configparser.RawConfigParser()
    filesRead = config.read(configFiles)
    # Bail out if there's no configuration file
    if not filesRead:
        raise ConfigNoFile(
            "No usable configuration file (%s/config) found!" % configDir)
    # Walk thru core configuration values and push them into the registry
    for section in config.sections():
        if not section in self.__registry:
            self.__registry[section] = {}
        self.__registry[section].update(config.items(section))
    if not hasattr(sys, "_called_from_test") or getattr(
            sys, "_called_from_test") is False:
        # do not use this in tests
        self.__user_config = configparser.RawConfigParser()
        # read the settings changed via gui client
        if os.path.exists(
                os.path.join(self.get('core.user-config'), "user-config")):
            filesRead = self.__user_config.read(
                os.path.join(self.get('core.user-config'), "user-config"))
            if not filesRead:
                raise ConfigNoFile(
                    "No usable GUI configuration file (%s/user-config) found!"
                    % configDir)
    # Initialize the logging module on the fly
    try:
        if self.__user_config is not None:
            # Overlay user-level changes onto the base config before use.
            for section in self.__user_config.sections():
                for option, value in self.__user_config.items(section):
                    config.set(section, option, value)
        # fileConfig wants a file-like object; round-trip through StringIO.
        tmp = StringIO()
        config.write(tmp)
        tmp2 = StringIO(tmp.getvalue())
        logging.config.fileConfig(tmp2)
    except configparser.NoSectionError:
        # No logging sections configured: fall back to basic stderr logging.
        logging.basicConfig(
            level=logging.ERROR,
            format='%(asctime)s (%(levelname)s): %(message)s')
def setup(self):
    """ Call super(TESTNAME, self).setup()

    Installs a MockKernel and points the Database connection string at
    an in-memory sqlite database for the test run.
    """
    self.kernel = MockKernel()
    self.kernel.services = {}
    try:
        config.add_section('Database')
    except Exception:
        # Section already exists from a previous test — that's fine.
        # (Narrowed from a bare ``except`` so SystemExit et al. escape.)
        pass
    config.set('Database','connectionstring','sqlite://')
    # Fix: identity comparison with None instead of ``== None``.
    if self.kernel.session is None:
        raise Exception("The test framework is broken")
def change_server_name(self, new_name):
    """Rewrite the remote Server.ini with a new quoted server name.

    Downloads the file, updates [Server] Name, uploads it back, and
    invalidates the cached Rcon.get_name result for the duration.
    Fix over the original: the leftover debug ``print`` of the raw file
    contents is now a logger.debug call.
    """
    with invalidates(Rcon.get_name):
        remote_path = f"{self.base_path}/ServerConfig/Server.ini"
        logger.info("Updating name in %s", remote_path)
        f = BytesIO()
        self.adapter.get_file(remote_path, f)
        logger.debug("Current Server.ini contents: %s", f.getvalue())
        config = ConfigParser()
        config.read_string(f.getvalue().decode())
        config.set("Server", "Name", f'"{new_name}"')
        temp_f = StringIO()
        config.write(temp_f)
        f = BytesIO(temp_f.getvalue().encode())
        self.adapter.put_file(f, remote_path)
        logger.info("Updated name to %s", new_name)
def write_config(self, filename):
    # Ecrit les config de toutes les sections dans un autre fichier
    """Write every section's options to *filename* as an INI file.

    :param filename: destination path

    Fix over the original: the output handle is managed with a ``with``
    block so it is always closed (the original opened it and never
    closed it, leaking the descriptor).
    """
    LOG.info("Writing .ini file (%s)" % filename)
    config = SafeConfigParser(allow_no_value=True)
    with open(filename, mode="w") as iniout:
        for section in self._sections:
            config.add_section(section)
            if hasattr(self, section):
                for opt in getattr(self, section).config:
                    config.set(section, str(opt),
                               str(getattr(self, section).config.get(opt)))
        config.write(iniout)
class AppLogger:
    """ Application log """
    # Singleton logger built from a templated fileConfig .conf file.
    working_dir = os.path.dirname(__file__).replace('libcommon','')
    __log_single = None
    __log_ini_file_temp = os.path.join(working_dir,'conf/log/log_config_temp.conf')
    __log_ini_file = os.path.join(working_dir,'conf/log/log_config.conf')
    # NOTE(review): the statements below run as class-body side effects at
    # import time — they read the template and write the concrete config
    # file before any instance exists.  Confirm this is intentional.
    filecontent = open(__log_ini_file_temp,'rb')
    config = ConfigParser.ConfigParser()
    config.readfp(filecontent)
    filecontent.close()
    # NOTE(review): passing a tuple to config.set relies on Python 2
    # ConfigParser accepting non-string values — confirm before porting.
    config.set("handler_file","args",(os.path.join(working_dir,'log/app.log'),'d',1,'%Y-%m-%d'))
    configContent = open(__log_ini_file,'wb')
    config.write(configContent)
    configContent.close()
    def __init__(self):
        # Lazily initialise the shared logger on first instantiation.
        if not AppLogger.__log_single:
            try:
                logging.config.fileConfig(AppLogger.__log_ini_file)
            except:
                pass
            AppLogger.__log_single = logging.getLogger(None)
    def getHandler(self):
        # Return the shared (root) logger instance.
        return AppLogger.__log_single
def logging_config(log_file):
    """Generate a logging config pointing at *log_file* and apply it.

    Reads the packaged dlt_py_logger.ini template, rewrites the console
    handler args to target *log_file*, writes the concrete config next
    to the log file, and loads it with fileConfig.  Returns the
    ``logging`` module for caller convenience.

    Fixes over the original: the "file not found" message now actually
    interpolates the missing path (the ``{}`` placeholder was missing,
    so .format() was a no-op), and the generated config is written in
    text mode ('w') — ConfigParser.write() emits str, so 'wb' raised
    TypeError on Python 3.
    """
    dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    log_config_file = os.path.join(dir_path, 'logger', 'configs',
                                   'dlt_py_logger.ini')
    if not os.path.isfile(log_config_file):
        err_str = 'Log configuration file not found: {}'.format(log_config_file)
        raise Exception(err_str)
    config = ConfigParser.RawConfigParser()
    config.read(log_config_file)
    config.set('handler_consoleHandler', 'args', "('"+log_file+"',)")
    log_root = os.path.dirname(log_file)
    config_file = os.path.join(log_root, 'logger_config_file')
    with open(config_file, 'w') as configfile:
        config.write(configfile)
    logging.config.fileConfig(config_file, disable_existing_loggers=False)
    return logging
def configure_logging():
    """ Configure logging """
    # Local imports avoid a circular dependency at module load time.
    from .tracing import initialize_tracer
    from . import log
    initialize_tracer()
    logging.config.dictConfig(log.conf[log.LOG_FORMAT])
    if log.LOG_FORMAT == 'gelf':
        # Route gunicorn's logs through the GELF-aware logger class too.
        config.set('http', 'logger_class', 'skill_sdk.log.GunicornLogger')
        # as bottle writes to stdout/stderr directly, patch it
        bottle_logger = logging.getLogger('bottle')
        bottle._stdout = lambda msg: bottle_logger.debug(msg)
        bottle._stderr = lambda msg: bottle_logger.info(msg)
def set_baud(new_baud):
    """Persist a new pixhawk baud rate to sololink.conf if it changed."""
    config = ConfigParser.SafeConfigParser()
    config.optionxform = str
    config.read(sololink_conf)
    old_baud = config_getint(config, config_baud_name)
    if old_baud is None:
        logger.warning("no %s in %s", config_baud_name, sololink_conf)
        return
    logger.debug("%s = %s", config_baud_name, str(old_baud))
    if new_baud == old_baud:
        return
    logger.info("updating %s %s from %s to %d",
                sololink_conf, config_baud_name, str(old_baud), new_baud)
    config.set("solo", config_baud_name, str(new_baud))
    config_write(config)
def main():
    """Smoke-test the config wrapper: load, read, mutate and save."""
    # Initialize logging
    initLog()
    #TEST
    # Exercise the config API end to end.
    print("config测试")
    config.set_file("config.ini")
    print(CONFIG_PATH)
    section = 'defaults'
    print(config.sections())
    config.set("log", "config_path", "logging.ini")
    # NOTE(review): the trailing comma below looks like a Python 2
    # "print without newline" leftover; in Python 3 it creates a 1-tuple
    # expression statement — confirm intended.
    print("test:"),
    print(config.get("defaults", "test"))
    config.set(section, "test", "test2")
    #config.add_section(section)
    config.save()
    return
def create_config():
    """Build the runtime config: defaults, optional file, CLI overrides."""
    options = parse_arguments()
    # 1) read the default config at "~/.ethereum"
    config = konfig.read_config()
    # 2) read config from file
    if getattr(options, 'config_file'):
        config.read(getattr(options, 'config_file'))
    # 3) apply cmd line options to config
    for section in config.sections():
        for option_name, _current in config.items(section):
            override = getattr(options, option_name, None)
            if override is not None:
                config.set(section, option_name, override)
    return config
def create_config():
    """Assemble configuration from defaults, config file and CLI flags."""
    options = parse_arguments()
    # 1) read the default config at "~/.ethereum"
    config = konfig.read_config()
    # 2) read config from file
    cfg_fn = getattr(options, 'config_file')
    if cfg_fn:
        config.read(cfg_fn)
    # 3) apply cmd line options to config
    for section in config.sections():
        for key, _value in config.items(section):
            cli_value = getattr(options, key, None)
            if cli_value is not None:
                config.set(section, key, cli_value)
    return config
def get_nat_mapping():
    """Populate NAT_IDS with id -> name entries from the OTC NAT endpoint.

    On a 401 the token is refreshed and the lookup retries itself
    recursively; other errors are only logged.
    """
    r = requests.get(config.get('OTC_ENDPOINTS', 'nat_names'),
                     headers={'X-Auth-Token': get_token()})
    if r.status_code == 200:
        for nat_gateway in json.loads(r.text)["nat_gateways"]:
            config.set('NAT_IDS', nat_gateway['id'], nat_gateway['name'])
            logging.debug("Created name entry for NAT '%s'='%s' successfully" %
                          (nat_gateway['id'], nat_gateway['name']))
        save_config_file()
    elif r.status_code == 401:
        logging.warn(
            "Token seems to be expired, requesting a new one and retrying")
        request_token()
        # NOTE(review): unbounded recursion if the endpoint keeps returning 401.
        get_nat_mapping()
    else:
        logging.error("Could not gather NAT names, got result code '%s'" %
                      r.status_code)
def getConfiguration(cfgfile):
    """Read an ini config file, backfilling required sections/options.

    Ensures a Main/credentials entry (defaulting to a credentials/ dir
    beside the config file) exists; writes the file back if anything was
    added.  Returns the parsed config object.
    """
    # required configuraiton options
    # Section: {'option': 'default value'}
    logger = logging.getLogger(__name__)
    logger.debug('getting configuration from file: {}'.format(cfgfile))
    cfgpath = os.path.dirname(cfgfile)
    config_required = {
        'Main': {
            'credentials': os.path.join(cfgpath, 'credentials/'),
        },
    }
    config = configuration.get_config(cfgfile)
    update_config = False
    logger.debug('checking sections')
    for section, values in list(config_required.items()):
        if not config.has_section(section):
            logger.warning('section: {} not found in {}'.format(
                section, cfgfile))
            logger.debug('adding section {}'.format(section))
            config.add_section(section)
            update_config = True
        for option, value in list(values.items()):
            if not config.has_option(section, option):
                logger.warning('option: {} not found in {}'.format(
                    option, cfgfile))
                logger.debug('adding option {}: {}'.format(option, value))
                config.set(section, option, value)
                update_config = True
    # for section, options in config_required.items():
    if update_config:
        try:
            logger.debug('updating configuration file at: {}'.format(cfgfile))
            configuration.create_config(cfgfile, config)
        except Exception as e:
            logger.error(e)
    return (config)
def luigid(argv=sys.argv[1:]):
    """Run the central luigi scheduler server.

    NOTE(review): ``argv=sys.argv[1:]`` is evaluated once at import time
    (early-bound default) — confirm callers never rely on sys.argv being
    modified after import.
    """
    import luigi.server
    import luigi.process
    import luigi.configuration
    parser = argparse.ArgumentParser(description=u'Central luigi server')
    parser.add_argument(u'--background', help=u'Run in background mode', action='store_true')
    parser.add_argument(u'--pidfile', help=u'Write pidfile')
    parser.add_argument(u'--logdir', help=u'log directory')
    parser.add_argument(u'--state-path', help=u'Pickled state file')
    parser.add_argument(u'--address', help=u'Listening interface')
    parser.add_argument(u'--unix-socket', help=u'Unix socket path')
    parser.add_argument(u'--port', default=8082, help=u'Listening port')
    opts = parser.parse_args(argv)
    if opts.state_path:
        config = luigi.configuration.get_config()
        config.set('scheduler', 'state_path', opts.state_path)
    if opts.background:
        # daemonize sets up logging to spooled log files
        logging.getLogger().setLevel(logging.INFO)
        luigi.process.daemonize(luigi.server.run, api_port=opts.port,
                                address=opts.address, pidfile=opts.pidfile,
                                logdir=opts.logdir, unix_socket=opts.unix_socket)
    else:
        if opts.logdir:
            logging.basicConfig(level=logging.INFO,
                                format=luigi.process.get_log_format(),
                                filename=os.path.join(opts.logdir, "luigi-server.log"))
        else:
            # No logdir: fall back to the logging setup named in luigi.cfg.
            config = luigi.configuration.get_config()
            logging_conf = None
            if not config.getboolean('core', 'no_configure_logging', False):
                logging_conf = config.get('core', 'logging_conf_file', None)
                if logging_conf is not None and not os.path.exists(logging_conf):
                    raise Exception("Error: Unable to locate specified logging configuration file!")
            if logging_conf is not None:
                print("Configuring logging from file: {}".format(logging_conf))
                logging.config.fileConfig(logging_conf)
            else:
                print("Defaulting to basic logging; consider specifying logging_conf_file in luigi.cfg.")
                logging.basicConfig(level=logging.INFO, format=luigi.process.get_log_format())
        luigi.server.run(api_port=opts.port, address=opts.address, unix_socket=opts.unix_socket)
def __parseCfgOptions(self):
    """Load config + config.d/* into the registry and wire up logging.

    Optional per-user GUI settings are merged over the parsed config
    before the combined result is fed to logging.config.fileConfig.
    """
    # Is there a configuration available?
    configDir = self.get('core.config')
    configFiles = self.__getCfgFiles(os.path.join(configDir, "config.d"))
    configFiles.insert(0, os.path.join(configDir, "config"))
    config = configparser.RawConfigParser()
    filesRead = config.read(configFiles)
    # Bail out if there's no configuration file
    if not filesRead:
        raise ConfigNoFile("No usable configuration file (%s/config) found!" % configDir)
    # Walk thru core configuration values and push them into the registry
    for section in config.sections():
        if not section in self.__registry:
            self.__registry[section] = {}
        self.__registry[section].update(config.items(section))
    if not hasattr(sys, "_called_from_test") or getattr(sys, "_called_from_test") is False:
        # do not use this in tests
        self.__user_config = configparser.RawConfigParser()
        # read the settings changed via gui client
        if os.path.exists(os.path.join(self.get('core.user-config'), "user-config")):
            filesRead = self.__user_config.read(os.path.join(self.get('core.user-config'), "user-config"))
            if not filesRead:
                raise ConfigNoFile("No usable GUI configuration file (%s/user-config) found!" % configDir)
    # Initialize the logging module on the fly
    try:
        if self.__user_config is not None:
            # Overlay user-level changes onto the base config before use.
            for section in self.__user_config.sections():
                for option, value in self.__user_config.items(section):
                    config.set(section, option, value)
        # fileConfig wants a file-like object; round-trip through StringIO.
        tmp = StringIO()
        config.write(tmp)
        tmp2 = StringIO(tmp.getvalue())
        logging.config.fileConfig(tmp2)
    except configparser.NoSectionError:
        # No logging sections configured: fall back to basic stderr logging.
        logging.basicConfig(level=logging.ERROR, format='%(asctime)s (%(levelname)s): %(message)s')
def set_syslog_logger(self, enabled):
    """Enable or disable the syslog handler on the root logger.

    Rewrites the "handlers" list in the logging config file at
    ``self.logconf``: when enabling, syslogHandler is placed first;
    when disabling, it is stripped from the list.

    Fix over the original: the file is read via ``config.read`` instead
    of ``readfp(open(...))``, which leaked the open file handle.  (Note
    ``read`` silently skips a missing file where ``open`` raised; the
    subsequent ``get`` still fails loudly in that case.)
    """
    config = ConfigParser.ConfigParser()
    config.read(self.logconf)
    handlers = config.get("logger_root", "handlers")
    handler_list = handlers.split(',')
    if enabled:
        new_list = "syslogHandler,"
    else:
        new_list = ""
    # Re-add every handler except syslogHandler, preserving order.
    for handler in handler_list:
        if handler == "syslogHandler":
            continue
        new_list += handler + ","
    new_list = new_list.rstrip(',')
    config.set("logger_root", "handlers", new_list)
    with open(self.logconf, 'w') as configfile:
        config.write(configfile)
def init_db():
    """CLI tool: record the last-synced Garmin Connect ride id in the db.

    Parses command-line options, loads configuration and logging, fetches
    the activity to discover its owner's userid, then stores last_ride
    under that userid in the shelve database.
    """
    global config
    parser = optparse.OptionParser(usage='%prog -c CONFIGFILE [OPTIONS] --last-ride=[NUM]',
                                   description="Set the last (most recent) sync'd ride ID from Garmin Connect.")
    _setup_parser_common(parser)
    #parser.add_option('--user-id', dest='userid', metavar='ID', type='int',
    #                  help='The Garmin Connect userid.')
    parser.add_option('--last-ride', dest='last_ride', metavar='ID', type='int',
                      help='The last (most recent) ride that has been synchronized.')
    (options, args) = parser.parse_args()
    if not options.config_file:
        parser.error("No config file specified")
        parser.print_usage()
        sys.exit(2)
    if not options.last_ride:
        parser.error("Must specify the --last-ride option.")
        parser.print_help()
        sys.exit(2)
    init_config(options.config_file)
    logging.config.fileConfig(options.config_file)
    if options.database:
        # CLI override for the database location.
        config.set('main', 'database_path', options.database)
    gc_username = config.get('main', 'gc.username')
    gc_password = config.get('main', 'gc.password')
    gc_client = ConnectClient(gc_username, gc_password)
    # Fetch the activity to learn which user it belongs to.
    activity = gc_client.get_activity(options.last_ride)
    # NOTE(review): .encode('latin1') yields bytes on Python 3; this line
    # assumes Python 2 unicode/str semantics — confirm before porting.
    userid = activity['userId'].encode('latin1')  # It's a unicode string in response
    with closing(shelve.open(config.get('main', 'database_path'), 'c')) as db:
        db[userid] = options.last_ride
        logging.info("Updated last activity id for user {0} to {1}".format(userid, db[userid]))
def main():
    """Entry point: configure logging, parse CLI options and listen for
    context-broker requests over an RPC connection.

    :return: process exit status (0 on clean shutdown).
    """
    # process configuration file (if exists) and setup logging
    config = SafeConfigParser(cfg_defaults)
    config.add_section('common')
    # Mirror the defaults into the [common] section so later config.get()
    # calls always find a value even without a config file.
    for key, value in cfg_defaults.items():
        config.set('common', key, str(value))
    if config.read(cfg_filename):
        # The same file doubles as the logging configuration.
        logging.config.fileConfig(cfg_filename)
    else:
        logging.basicConfig(stream=sys.stdout, level=cfg_defaults['logLevel'], format=cfg_defaults['logFormat'])

    # process command line arguments
    parser = OptionParser(version='{0} {1}'.format(__description__, __version__), description=__description__)
    # NOTE(review): the trailing comma after this call wraps the (None)
    # result in a tuple -- harmless but probably unintended.
    parser.add_option('-b', '--brokerUrl', dest='brokerUrl', metavar='URL', type='string',
                      default=config.get('common', 'brokerUrl'), help='context broker URL [default=%default]'),
    # NOTE(review): logging._levelNames is a private, Python 2-only map
    # removed in Python 3 -- confirm target interpreter.
    parser.add_option('-l', '--logLevel', dest='logLevel', metavar='LEVEL',
                      choices=[level for level in logging._levelNames.keys() if isinstance(level, str)],
                      default=config.get('common', 'logLevel'), help='logging level [default=%default]')
    (opts, args) = parser.parse_args()  # @UnusedVariable
    # Command-line values override the configuration file.
    config.set('common', 'brokerUrl', opts.brokerUrl)
    config.set('common', 'logLevel', opts.logLevel)
    logging.root.setLevel(opts.logLevel)

    # rpc connection
    connection = rpc.create_connection()
    try:
        logging.info('Context Broker URL: %s', config.get('common', 'brokerUrl'))
        listen(connection, config)
    finally:
        # Always release the RPC connection, even if listen() raises.
        connection.close()
    return 0
def create_config():
    """Build the application config by layering three sources.

    1. defaults read from "~/.ethereum",
    2. an explicit config file (created with defaults when absent),
    3. command-line options, which override matching config entries.

    Also publishes the resulting data directory via ``data_dir.set``.

    :return: the populated config object.
    """
    cli_options = parse_arguments()

    # Layer 1: the default config at "~/.ethereum".
    config = konfig.read_config()

    # Layer 2: an explicit config file, if one was named.
    config_path = getattr(cli_options, 'config_file')
    if config_path:
        if not os.path.exists(config_path):
            konfig.read_config(config_path)  # creates default
        config.read(config_path)

    # Layer 3: any command-line option with the same name as a config
    # entry overrides it.
    for section_name in config.sections():
        for option_name, _unused in config.items(section_name):
            override = getattr(cli_options, option_name, None)
            if override is not None:
                config.set(section_name, option_name, override)

    # set datadir
    data_dir.set(config.get('misc', 'data_dir'))
    return config
def set_config_data(self, data: dict):
    """Apply per-identifier configuration updates.

    *data* maps an identifier to a nested dict of {section: {option: value}}.
    For each identifier the existing config is loaded, only sections and
    options that already exist are overwritten (values coerced to str),
    and the result is written back.  The special "meta" identifier may
    additionally carry "hostname" and "update" directives.
    """
    for identifier, update_data in data.items():
        # dont rewrite empty...
        if not update_data:
            continue

        if identifier == "meta":
            new_hostname = update_data.get("hostname", None)
            if new_hostname:
                SysUtil.set_hostname(new_hostname)
            if update_data.get("update", False):
                SysUtil.update_from_git()
        # NOTE(review): "meta" still falls through to the generic config
        # update below -- presumably intentional; confirm.

        config = SysUtil.ensure_config(identifier)
        # Only touch sections/options that already exist in the config.
        for section in set(config.sections()) & set(update_data.keys()):
            section_changes = update_data[section]
            for option in set(config.options(section)) & set(section_changes.keys()):
                config.set(section, option, str(section_changes[option]))
        SysUtil.write_config(config, identifier)
def __parse_config_file(file=None):
    """
    Parse the configuration file

    :param file: Configuration file to parse (note: shadows the ``file``
        builtin on Python 2; kept for interface compatibility)
    :type file: String
    """
    config = ConfigParser.ConfigParser()
    if file is not None:
        # If config file, load Logger config
        Config._log = logging
        logging.config.fileConfig(file)
        # BUG FIX: the original config.readfp(open(file)) never closed the
        # file handle; the context manager closes it deterministically.
        with open(file) as config_fp:
            config.readfp(config_fp)
    else:
        # Set defaults
        config.add_section("app:main")
        config.set("app:main", "db_uri", "mongodb://localhost")
        config.set("app:main", "db_name", "mobyle")
    Config._config = config
def on_file_open_action_activated(self,widget,data=None):
    """
    Displays a file chooser dialog. When a file is selected, a new job is
    created using the file.

    :param widget: Gtk widget that emitted the signal (unused).
    :param data: optional user data attached to the signal (unused).
    """
    dialog = Gtk.FileChooserDialog(title='Open File - Inkcut',action=Gtk.FileChooserAction.OPEN,
        buttons=(Gtk.STOCK_CANCEL,Gtk.ResponseType.CANCEL,Gtk.STOCK_OPEN,Gtk.ResponseType.OK))
    # Start in the last used directory, falling back to the home directory
    # (USERPROFILE on Windows, HOME elsewhere).
    last_folder = config.get('Inkcut','last_file_open_dir') or os.getenv('USERPROFILE') or os.getenv('HOME')
    dialog.set_current_folder(last_folder)
    # SVG filter (the primary input format).
    filter = Gtk.FileFilter()
    filter.set_name("SVG Images")
    filter.add_mime_type("image/svg+xml")
    filter.add_pattern("*.svg")
    dialog.add_filter(filter)
    # Catch-all filter.
    filter = Gtk.FileFilter()
    filter.set_name("All files")
    filter.add_pattern("*")
    dialog.add_filter(filter)
    # Todo, set these from plugins
    filter = Gtk.FileFilter()
    filter.set_name("Plots")
    for pattern in PLUGIN_EXTENSIONS:
        filter.add_pattern(pattern)
    dialog.add_filter(filter)
    response = dialog.run()
    if response == Gtk.ResponseType.OK:
        filename = dialog.get_filename()
        # Remember the directory for the next open dialog.
        last_folder = os.path.abspath(os.path.dirname(filename))
        config.set('Inkcut','last_file_open_dir',last_folder)
        log.debug("Set last_file_open_dir = %s"%last_folder)
        msg = "Opening file: %s..."%filename
        self.flash(msg,indicator=True)
        # Defer job creation to the GLib main loop so the dialog can be
        # torn down before the (potentially slow) job creation runs.
        GObject.idle_add(self.create_job,filename)
    dialog.destroy()
def ensure_light_config(cls, identifier):
    """
    ensures a configuration file exists for this identifier.
    if a config file doesnt exist then it will create a default one.

    :param identifier: identifier of the light
    :type identifier: str
    :return: configuration for the light
    :rtype: configparser.ConfigParser
    """
    # Start from the built-in default template, then try to overlay the
    # on-disk file for this light.
    config = configparser.ConfigParser()
    config.read_string(default_light_config)
    ini_path = cls.identifier_to_ini(identifier)
    file_was_read = False
    try:
        file_was_read = bool(config.read(ini_path))
    except Exception as exc:
        # Best effort: a corrupt file is reported and replaced below.
        print(str(exc))
    if file_was_read:
        return config

    # No usable file yet: specialise the template's file_path for this
    # identifier, then persist the resulting default config.
    template_path = config.get("light", "file_path")
    if "{identifier}" in template_path:
        config.set("light", "file_path", template_path.format(identifier=identifier))
    cls.write_light_config(config, identifier)
    return config
def _setup_system(self):
    """Write the default ``.guardiancl.ini`` in the application directory.

    Creates the file with the stock ROUTES endpoints only when it does
    not already exist; an existing configuration is left untouched.
    """
    config_path = os.path.join(utils.Utils().get_app_dir(), '.guardiancl.ini')
    if os.path.exists(config_path):
        return

    # Default service endpoints, written in this fixed order.
    default_routes = (
        ('auth', 'http://guardiaocloud.com.br/service/v1/authenticate'),
        ('devices', 'http://guardiaocloud.com.br/service/v1/devices'),
        ('collect', 'http://guardiaocloud.com.br/collect'),
    )
    config = configparser2.ConfigParser()
    config.add_section('ROUTES')
    for route_name, url in default_routes:
        config.set('ROUTES', route_name, url)
    with open(config_path, 'w') as configfile:
        config.write(configfile)
def create_config():
    """Build the pyethereum client configuration.

    Defaults are set programmatically, then overridden by command-line
    options, then (if given) by a configuration file named via -C/--config.

    :return: the populated ConfigParser instance.
    """
    config = ConfigParser.ConfigParser()
    # set some defaults, which may be overwritten
    config.add_section('network')
    config.set('network', 'listen_host', 'localhost')
    config.set('network', 'listen_port', '30303')
    config.set('network', 'num_peers', '5')
    config.set('network', 'remote_port', '30303')
    config.set('network', 'remote_host', '')
    config.set('network', 'client_id', 'Ethereum(py)/0.0.1')
    config.add_section('misc')
    config.set('misc', 'verbosity', '1')
    # NOTE(review): storing None works on the Python 2 ConfigParser only;
    # Python 3 requires string values -- confirm target interpreter.
    config.set('misc', 'config_file', None)
    config.set('misc', 'logging', None)
    config.add_section('wallet')

    # NODE_ID == pubkey, needed in order to work with Ethereum(++)
    config.set('wallet', 'pub_key', 'J\x02U\xfaFs\xfa\xa3\x0f\xc5\xab\xfd<U\x0b\xfd\xbc\r<\x97=5\xf7&F:\xf8\x1cT\xa02\x81\xcf\xff"\xc5\xf5\x96[8\xacc\x01R\x98wW\xa3\x17\x82G\x85I\xc3o|\x84\xcbD6\xbay\xd6\xd9')

    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage, version="%prog 0.1a")
    parser.add_option("-l", "--listen", dest="listen_port",
                      default=config.get('network', 'listen_port'),
                      help="<port> Listen on the given port for incoming connected (default: 30303)."
                      )
    parser.add_option("-r", "--remote", dest="remote_host",
                      help="<host> Connect to remote host (try: 54.201.28.117 or 54.204.10.41)"
                      )
    parser.add_option("-p", "--port", dest="remote_port",
                      default=config.get('network', 'remote_port'),
                      help="<port> Connect to remote port (default: 30303)"
                      )
    parser.add_option("-v", "--verbose", dest="verbosity",
                      default=config.get('misc', 'verbosity'),
                      help="<0 - 3> Set the log verbosity from 0 to 3 (default: 1)")
    parser.add_option("-L", "--logging", dest="logging",
                      default=config.get('misc', 'logging'),
                      help="<logger1:LEVEL,logger2:LEVEL> set the console log level for logger1, logger2, etc.\
 Empty loggername means root-logger, e.g. 'pyethereum.wire:DEBUG,:INFO'. \
Overrides '-v'")
    parser.add_option("-x", "--peers", dest="num_peers",
                      default=config.get('network', 'num_peers'),
                      help="<number> Attempt to connect to given number of peers (default: 5)")
    parser.add_option("-C", "--config", dest="config_file", help="read coniguration")

    (options, args) = parser.parse_args()

    # set network options
    for attr in ('listen_port', 'remote_host', 'remote_port', 'num_peers'):
        config.set('network', attr, getattr(
            options, attr) or config.get('network', attr))

    # set misc options
    for attr in ('verbosity', 'config_file'):
        config.set(
            'misc', attr, getattr(options, attr) or config.get('misc', attr))

    if len(args) != 0:
        # NOTE(review): parser.error() already exits -- sys.exit(1) below
        # appears unreachable.
        parser.error("wrong number of arguments")
        sys.exit(1)

    # Values from an explicit config file override everything set so far.
    if config.get('misc', 'config_file'):
        config.read(config.get('misc', 'config_file'))

    #configure logging
    loggerlevels = getattr(options, 'logging') or ''
    configure_logging(loggerlevels, verbosity=config.getint('misc', 'verbosity'))
    return config
def main(args=None):
    """Entry point for the Numina CLI.

    Builds the configuration (programmatic defaults overridden by site-wide
    and local numina.cfg files), wires up the argparse subcommand tree
    (built-in targets plus 'numina_plugins' entry points), configures
    logging, and dispatches to the selected subcommand.

    :param args: optional argv-style list; None means sys.argv[1:].
    """
    # Configuration args from a text file.
    # BUG FIX: SafeConfigParser is a deprecated alias of ConfigParser and
    # was removed in Python 3.12.
    config = configparser.ConfigParser()
    # Building programatically
    config.add_section('numina')
    config.set('numina', 'format', 'yaml')
    # Custom values, site wide and local
    config.read(['.numina/numina.cfg', os.path.join(xdg_config_home, 'numina/numina.cfg')])

    parser = argparse.ArgumentParser(
        description='Command line interface of Numina',
        prog='numina',
        epilog="For detailed help pass --help to a target"
    )
    parser.add_argument(
        '-l', action="store", dest="logging", metavar="FILE",
        help="FILE with logging configuration"
    )
    parser.add_argument(
        '-d', '--debug', action="store_true",
        dest="debug", default=False,
        help="make lots of noise"
    )
    subparsers = parser.add_subparsers(
        title='Targets',
        description='These are valid commands you can ask numina to do.'
    )

    # Init subcommands
    cmds = ['clishowins', 'clishowom', 'clishowrecip', 'clirun', 'clirunrec']
    for cmd in cmds:
        cmd_mod = import_module('.'+cmd, 'numina.user')
        register = getattr(cmd_mod, 'register', None)
        if register is not None:
            register(subparsers)

    # Load plugin commands
    for entry in pkg_resources.iter_entry_points(group='numina_plugins'):
        reg_fun = entry.load()
        try:
            reg_fun(subparsers)
        except Exception:
            # BUG FIX: the original caught StandardError, which does not
            # exist on Python 3 and would itself raise NameError here.
            # Deliberately best-effort: a broken plugin must not break the CLI.
            pass

    args = parser.parse_args(args)

    # logger file
    if args.logging is not None:
        logging.config.fileConfig(args.logging)
    else:
        # This should be a default path in defaults.cfg
        try:
            args.logging = config.get('numina', 'logging')
            logging.config.fileConfig(args.logging)
        except configparser.Error:
            logging.config.dictConfig(numina_cli_logconf)

    _logger.info('Numina simple recipe runner version %s', __version__)

    args.command(args)
def get_service_configuration(configfile_handler=None):
    """I set rdfrest Service default configuration options and possibly
    override them with the values extracted from a configuration file.

    :param configfile_handler: optional handler of a configuration file

    :return: Configuration object.
    """
    # When allow_no_value=True is passed, options without values return None
    # The value must be used as flags i.e
    # [rdf_database]
    # repository
    # and not :
    # repository =
    # which will return an empty string whatever 'allow_no_value' value is set
    config = SafeConfigParser(allow_no_value=True)

    # Default values, grouped by section.  The tuple order is the order in
    # which sections are created in the parser.
    service_defaults = (
        ('server', (
            ('host-name', 'localhost'),
            ('port', '8001'),
            ('threads', '2'),
            ('base-path', ''),
            ('force-ipv4', 'false'),
            ('max-bytes', '-1'),
            ('flash-allow', 'false'),
            ('max-triples', '-1'),
            ('cors-allow-origin', ''),
            ('reset-connection', 'false'),
            ('send-traceback', 'false'),
        )),
        ('ns_prefix', ()),
        # A future specification section "httpd" or "wsgi"
        # may be needed for HttpFrontend
        ('plugins', (
            ('post_via_get', 'false'),
        )),
        # TODO : optional plugin specific configuration
        ('rdf_database', (
            ('repository', ''),
            ('force-init', 'false'),
        )),
        ('logging', (
            ('loggers', ''),
        )),
    )
    for section, options in service_defaults:
        config.add_section(section)
        for option, value in options:
            config.set(section, option, value)

    # Loading from config file
    if configfile_handler is not None:
        config.readfp(configfile_handler)

    return config
def add(name, val):
    """Set option *name* to *val* unless the option already exists.

    Relies on ``config`` and ``section`` from the enclosing scope, so an
    existing value always wins over the supplied default.
    """
    if config.has_option(section, name):
        return
    config.set(section, name, val)
def initLogging(cls, loggingLevel=None, console="stderr", logToFile=False):
    """ A lower-level function to initialize python logging for the calling
    process. Supports logging output to a console (stderr or stdout) and/or
    log file.  See also higher-level functions initTool() and initService().

    NOTE: by convention, logging should be initialized only by the main
    process. modules that provide APIs should not initialize logging as it
    would clobber the logging configuration desired by the application.

    :param loggingLevel: logging level string for filtering in root logger
      and output handlers; one of: "DEBUG", "INFO", "WARNING", "WARN",
      "ERROR", "CRITICAL" or "FATAL" that correspond to logging.DEBUG,
      logging.INFO, etc. Defaults to "INFO".

    :param console: Console logging destination; either "stderr" or "stdout";
      None to suppress output to console.

    :param logToFile: True to output logs to a file. If enabled, a log file
      specific to the calling app instance will be created at file path
      generated by our getApplicationLogFilePath method.
    """
    validLoggingLevels = ["DEBUG", "INFO", "WARNING", "WARN", "ERROR",
                          "CRITICAL", "FATAL"]

    if loggingLevel is not None and loggingLevel not in validLoggingLevels:
        raise ValueError("loggingLevel %r not one of %s" %
                         (loggingLevel, validLoggingLevels))

    # Maps a console destination name to the "args" expression expected by
    # fileConfig's StreamHandler section.
    consoleHandlerArgsMap = dict(
        stderr="(sys.stderr, )",
        stdout="(sys.stdout, )"
    )

    if console is not None and console not in consoleHandlerArgsMap:
        raise ValueError("console %r not one of %s" %
                         (console, consoleHandlerArgsMap.keys()))

    # Configure logging timestamp for UTC
    logging.Formatter.converter = time.gmtime

    # Load the config template
    config = ConfigParser()
    with open(cls.getLoggingConfTemplatePath(), 'r') as fileObj:
        config.readfp(fileObj)

    # Customize the config template
    handlers = []
    if console is not None:
        handlers.append("console")
        if loggingLevel is not None:
            config.set("handler_console", "level", loggingLevel)
        config.set("handler_console", "args", consoleHandlerArgsMap[console])

    if logToFile:
        handlers.append("file")
        # Get a log file path specific to the calling app
        logFilePath = cls.getApplicationLogFilePath()
        # Create the directory that will contain the log file
        makeDirectoryFromAbsolutePath(os.path.dirname(logFilePath))
        if loggingLevel is not None:
            config.set("handler_file", "level", loggingLevel)
        config.set("handler_file", "filename", logFilePath)

    if not handlers:
        # No console and no file requested: route everything to a null
        # handler so logging calls stay harmless.
        # (Python 2 print-to-stream syntax; this module targets Python 2.)
        print >> sys.stderr, (
            "WARNING: logging_support is disabling logging output because all "
            "output handlers are disabled")
        handlers.append("null")

    # Convert list of logging output handler names into comma-separated string
    handlers = ",".join(handlers)

    # Initialize the root logger
    if loggingLevel is not None:
        config.set("logger_root", "level", loggingLevel)
    config.set("logger_root", "handlers", handlers)

    # Initialize the list of all logging output handlers
    config.set("handlers", "keys", handlers)

    # Dump the customized config into a StringIO object for logging setup
    customConfigFile = StringIO()
    config.write(customConfigFile)
    customConfigFile.seek(0)

    # Initialize logging from StringIO file object
    logging.config.fileConfig(customConfigFile, disable_existing_loggers=False)