def load_config(self, configpath, pkg=False):
    """Load config sections with ConfigParser and apply them to self.

    Each section whose name matches an attribute of ``self`` updates that
    attribute's ``config`` dict (values pre-processed by ``format_dict``).

    :param configpath: path to the file (relative to the package when
        *pkg* is True)
    :param pkg: True when the file is shipped inside the package
    :raises OSError: when *pkg* is False and the file does not exist
    """
    if not pkg and not os.path.exists(configpath):
        LOG.error("Configuration file not found (%s)" % configpath)
        from errno import ENOENT
        # Include strerror and the offending path so the OSError is
        # self-describing (OSError(ENOENT) alone carries no message).
        raise OSError(ENOENT, os.strerror(ENOENT), configpath)
    config = SafeConfigParser(allow_no_value=True)
    if pkg:
        with pkgr.resource_stream(__name__, configpath) as conf:
            config.readfp(conf)
    else:
        config.read(configpath)
    LOG.debug(config)
    for section in config.sections():
        if hasattr(self, section):
            tmp = format_dict(dict(config.items(section)))
            getattr(self, section).config.update(tmp)
            LOG.debug("%s config updated" % section)
            LOG.debug("%s.%s : %s" % (self.__class__.__name__, section,
                                      getattr(self, section)))
        else:
            # typo fix: was "Unknow"
            LOG.warning("Unknown config section %s" % section)
def load_attrid_config(cfg_path=None):
    # Parse the [id_map] section of the config at cfg_path into three maps:
    #   id_map        : attribute name (1st field) -> attribute id (option key)
    #   id_with_ratio : attribute id -> float ratio, when a 2nd numeric field
    #                   is present
    #   id_with_ip    : attribute id -> IPv4 string, when a 3rd field matching
    #                   a dotted quad is present
    # Option format: <id> = <name>[,<ratio>[,<ip>]]
    id_map = {}
    id_with_ip = {}
    id_with_ratio = {}
    config = ConfigParser.ConfigParser()
    # skip_leading_wsp presumably strips leading whitespace per line before
    # parsing -- TODO confirm.  NOTE(review): the open() handle is never
    # closed here.
    config.readfp(skip_leading_wsp(open(cfg_path)))
    ip_pattern = re.compile(r'^(\d+\.){3}\d+$')
    num_pattern = re.compile(r'^[\d\.]+$')
    attr_items = config.items('id_map')
    for item in attr_items:
        val = item[1].split(',')
        # Reverse mapping: attribute name back to its numeric id (the key).
        id_map[val[0].strip()] = item[0].strip()
        if len(val) >= 2:
            match = num_pattern.match(val[1])
            # print item[0].strip(),"->",val[0].strip(),"->",val[1].strip()
            if match:
                id_with_ratio[item[0].strip()] = float(val[1].strip())
                # print "match ratio:",val[1].strip()
        if len(val) > 2:
            print item[0].strip(), "->", val[0].strip(), "->", val[1].strip(), "->", val[2].strip()
            match = ip_pattern.match(val[2])
            if match:
                # print "match other ip:",val[2].strip()
                id_with_ip[item[0].strip()] = val[2].strip()
    return id_map, id_with_ratio, id_with_ip
def parse_cfg_with_sections(stream):
    """Return the stream content as a dict.

    The content is first tried as JSON; on failure it is parsed as an INI
    document and returned as ``{section: {option: value}}``.

    Experimental: ConfigParser sections become sub-sub sections when
    separated by dots::

        [foo.bar]
        baz = 42

    is equivalent to JSON::

        {"foo": {"bar": {"baz": 42}}}

    NOTE(review): the dotted-section nesting described above is not
    implemented in this body -- sections are returned flat.
    """
    content = stream.read()
    result = dict()
    try:
        # Fast path: the whole stream is a JSON document.
        jdict = json.loads(NativeIO(content).read())
        return jdict
    except ValueError:
        pass  # logger.exception('Could not parse stream as JSON.')
    # INI fallback; optionxform=str keeps option names case-sensitive.
    # (The original wrapped this in a no-op `try/except: raise`.)
    config = ConfigParser()  # strict=False?
    config.optionxform = str
    config.readfp(NativeIO(content))
    for sec in config.sections():
        result[sec] = dict(config.items(sec))
    return result
def read_config(albumNum, trackNum):
    """Read album/track numbers from the [cdc] section of CONFIG.

    On any config lookup/parse error the caller-supplied values are
    returned unchanged.

    :return: [albumNum, trackNum]
    """
    with open(CONFIG, 'r') as file:
        cfgfile = file.read()
    config = ConfigParser.RawConfigParser(allow_no_value=True)
    config.readfp(io.BytesIO(cfgfile))
    # list all contents
    #logger.info('List all contents')
    #logger.info('Sections: {}'.format(config.sections()))
    #for section in config.sections():
    #    logger.info('Section: {}'.format(section))
    #    logger.info('Options: {}'.format(config.options(section)))
    #    for option in config.options(section):
    #        val = config.get(section, option)
    #        if val == -1:
    #            logger.warning('skip: {}'.format(option))
    #        logger.info('read config: {} {} {}'.format(section, option, val))
    try:
        albumNum = config.getint('cdc', 'album')
        trackNum = config.getint('cdc', 'track')
        logger.info('read config album: {}, track: {}'.format(
            albumNum, trackNum))
    except (ConfigParser.Error, ValueError):
        # Narrowed from a bare except: only missing/unparsable config
        # values should fall back to the defaults.
        logger.warning('can\'t read config file')
    return [albumNum, trackNum]
def get_logging_config(cfg_path=None):
    """Build the logging configuration dict from the [Main] section.

    :param cfg_path: optional explicit config path (resolved by
        get_config_path)
    :return: dict with log_level, collector_log_file and
        disable_file_logging keys
    """
    logging_config = {'log_level': None}
    logging_config['collector_log_file'] = './log/collector.log'
    config_path = get_config_path(cfg_path)
    config = ConfigParser.ConfigParser()
    # 'with' closes the handle deterministically (the original leaked it).
    with open(config_path) as config_file:
        config.readfp(skip_leading_wsp(config_file))
    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)
    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(
            config.get('Main', 'log_level'))
    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get(
            'Main', 'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False
    return logging_config
def ini_to_dict(ini_str):
    """Parse an INI document given as a string into a dict of dicts.

    Options of the special ``default`` section are stored at the top
    level; every other section becomes a nested dict.  Inline ``#``
    comments are stripped from values, as are surrounding single quotes.
    Option names keep their case.

    :param ini_str: the INI content as a string
    :return: dict (possibly with nested per-section dicts)
    """
    import re
    try:
        # SafeConfigParser was removed in Python 3.12; ConfigParser is the
        # same class (SafeConfigParser was only a deprecated alias in 3.x),
        # so this preserves behavior on every 3.x release.
        from configparser import ConfigParser
    except ImportError:  # Python 2
        from ConfigParser import SafeConfigParser as ConfigParser
    config, conf_dict = ConfigParser(), {}
    config.optionxform = str  # keep option names case-sensitive
    try:
        config.read_string(ini_str)
    except AttributeError:  # Python 2 parser has no read_string()
        from io import StringIO
        config.readfp(StringIO(ini_str))
    for sect in config.sections():
        if 'default' != sect:
            conf_dict[sect] = tmp_dict = {}
        else:
            # [default] options live at the top level of the result.
            tmp_dict = conf_dict
        for key, val in config.items(sect):
            # Keep only the part before any '#', then drop whitespace
            # and surrounding single quotes.
            match_1 = re.search('([^#]*)', val)
            tmp_dict[key] = match_1.groups()[0].strip().strip("'")
    return conf_dict
def run(self):
    """Thread entry point: read DB settings from the config file and
    connect to the xenia database.

    Sets processData to False when the connection fails.
    """
    # The original only bound `logger` when loggerFlag was set, yet read
    # it unconditionally below -> NameError.  Default it to None.
    logger = None
    if (self.loggerFlag):
        logger = logging.getLogger(type(self).__name__)
        logger.info("Starting %s thread." % (self.getName()))
    try:
        processData = True
        config = ConfigParser.RawConfigParser()
        # 'with' guarantees the handle is closed even if a get() raises
        # (the original leaked it on the exception path).
        with open(self.configFilename, 'r') as configFile:
            config.readfp(configFile)
        dbUser = config.get('Database', 'user')
        dbPwd = config.get('Database', 'password')
        dbHost = config.get('Database', 'host')
        dbName = config.get('Database', 'name')
        dbConnType = config.get('Database', 'connectionstring')
        db = xeniaAlchemy()
        if db.connectDB(dbConnType, dbUser, dbPwd, dbHost, dbName, False):
            if (logger):
                logger.info("Succesfully connect to DB: %s at %s" % (dbName, dbHost))
        else:
            # Guarded: the original called logger.error unconditionally,
            # crashing when loggerFlag was False.
            if (logger):
                logger.error("Unable to connect to DB: %s at %s. Terminating script." % (dbName, dbHost))
            processData = False
            #sys.exit(-1)
    # 'except X, e' is Python-2-only syntax; 'as' works on 2.6+ and 3.
    except ConfigParser.Error as e:
        if (logger):
            logger.exception(e)
def run():
    """Parse CONFIG_PATH, configure logging from LOGGING_CONFIG, and
    start a Client built around the parsed configuration."""
    parser = ConfigParser.ConfigParser()
    with open(CONFIG_PATH) as handle:
        parser.readfp(handle)
    logging.config.dictConfig(LOGGING_CONFIG)
    Client(parser).start()
def read_config(albumNum, trackNum):
    """Read album/track numbers from the [cdc] section of CONFIG.

    On any config lookup/parse error the caller-supplied values are
    returned unchanged.

    :return: [albumNum, trackNum]
    """
    with open(CONFIG, 'r') as file:
        cfgfile = file.read()
    config = ConfigParser.RawConfigParser(allow_no_value=True)
    config.readfp(io.BytesIO(cfgfile))
    # list all contents
    #logger.info('List all contents')
    #logger.info('Sections: {}'.format(config.sections()))
    #for section in config.sections():
    #    logger.info('Section: {}'.format(section))
    #    logger.info('Options: {}'.format(config.options(section)))
    #    for option in config.options(section):
    #        val = config.get(section, option)
    #        if val == -1:
    #            logger.warning('skip: {}'.format(option))
    #        logger.info('read config: {} {} {}'.format(section, option, val))
    try:
        albumNum = config.getint('cdc', 'album')
        trackNum = config.getint('cdc', 'track')
        logger.info('read config album: {}, track: {}'.format(albumNum, trackNum))
    except (ConfigParser.Error, ValueError):
        # Narrowed from a bare except: only missing/unparsable config
        # values should fall back to the defaults.
        logger.warning('can\'t read config file')
    return [albumNum, trackNum]
def most_similar_expert_project(self, topProjects, filterParams, expertTopN):
    """Rank experts by the similarity of their project achievements.

    :param topProjects: candidate projects to filter against
    :param filterParams: filter parameters forwarded to self.filter
    :param expertTopN: number of top-scoring experts to return
    :return: list of (expert_id, score) pairs, best first
    """
    # Scoring knobs from config.ini; 'with' closes the handle
    # (the original leaked it).
    with open("config.ini", 'r') as file:
        config = ConfigParser.ConfigParser()
        config.readfp(file)
    # LEN: how many of an expert's achievements to accumulate.
    LEN = int(config.get('global', 'len'))
    # COE: damping coefficient applied from the 2nd achievement onward.
    COE = float(config.get('global', 'coe'))
    topDocs = {}
    topDocs['project'] = self.filter('project', topProjects, filterParams, 15)
    # expertMap: expert id -> list of achievement similarities.
    expertMap, expertInfoOut = self.getSimExpertsIds(topDocs)
    expertScoreMap = {}  # expert id -> aggregated score
    for expert in expertMap:
        expertMap[expert].sort(reverse=True)
        sim = expertMap[expert][0]
        for i in range(1, len(expertMap[expert])):
            if i >= LEN:
                break
            sim = sim + COE * expertMap[expert][i]
        expertScoreMap[expert] = sim
    result = sorted(expertScoreMap.items(), key=lambda item: item[1],
                    reverse=True)[0:expertTopN]
    out = []
    for i in result:
        if i[0] in expertInfoOut:
            out.append({i[0]: expertInfoOut[i[0]]})
            # out[i[0]]=expertInfoOut[i[0]]
    # self.printOut(out,LEN)
    return result
def __init__(self, configdir):
    '''Initialize settings from a configfile.

    Creates a default logging.conf when none exists, then applies it and
    disables logging when [logger_root] declares no handlers.
    '''
    self.configdir = configdir
    self.logconf = os.path.abspath("%s/logging.conf" % self.configdir)
    # If logger.conf does not exist then create it with some defaults.
    if not os.path.isfile(self.logconf):
        self.writeConfig()
    try:
        logging.config.fileConfig(self.logconf)
        # Disable logging if no log handlers are found
        config = ConfigParser.ConfigParser()
        # 'with' closes the handle (the original leaked it).
        with open(self.logconf) as conf_file:
            config.readfp(conf_file)
        handlers = config.get("logger_root", "handlers")
        # Was `handlers is ""`: identity comparison against a string
        # literal is unreliable; use equality.
        if handlers == "":
            logging.disable(logging.INFO)
        logging.info('Logger initialized.')
    except socket.error:
        sys.stderr.write('Logger failed to initialize\n')
def parse_channel_config_file(file_path):
    # Parse a UTF-8 INI file in which each section describes one channel and
    # must define channel_name, channel_description and rss_url.
    # Exits the process (status 1) on a missing section or option.
    # Returns a list of {option: value} dicts, one per channel, in file order.
    config = ConfigParser()
    with codecs.open(file_path, 'r', encoding='utf-8') as f:
        config.readfp(f)
    channel_list = []
    # validate and parse the config file
    if not config.sections():
        print >> sys.stderr, "no section in config file!"
        sys.exit(1)
    for section in config.sections():
        channel_info = {}
        OPTIONS = (u'channel_name', u'channel_description', u'rss_url')
        for option in OPTIONS:
            if not config.has_option(section, option):
                print >> sys.stderr, "no option [%s] in section [%s]!" % (
                    option, section)
                sys.exit(1)
            channel_info[option] = config.get(section, option)
        channel_list.append(channel_info)
    return channel_list
def parse_config(self, arguments=None):
    """Merge CLI args, config-file values and environment overrides.

    For every section/var declared in self.PARAMETERS, the value is taken
    from the environment variable SECTION_VAR when set, otherwise from the
    config file.

    :param arguments: optional argv list for the argparse parser
    :raises ValueError: when a required variable ends up undefined
    :return: {section: {var: value}}
    """
    finalconfig = {}
    args = self.parser.parse_args(arguments)
    config = configparser.SafeConfigParser()
    try:
        with open(args.config) as fdconfig:
            config.readfp(fdconfig)
    except Exception as e:
        # Missing/unreadable file: create empty sections so the env-var
        # pass below can still populate values.
        msg = "Ignoring configuration file '%s'"
        self.logger.warn(msg % (args.config))
        for section in self.PARAMETERS.keys():
            config.add_section(section)
    else:
        self.logger.info("Read configuration file '%s'" % args.config)
    for section in self.PARAMETERS.keys():
        cfgsection = dict(config.items(section))
        for var, required in self.PARAMETERS[section].iteritems():
            # build env variables like IRONIC_URL
            envparameter = section.upper() + '_' + var.upper()
            try:
                cfgsection[var] = os.environ[envparameter]
                msg = "Reading env variable '%s'" % envparameter
                self.logger.debug(msg)
            except KeyError:
                # Narrowed from a bare except: only "env var not set"
                # should be ignored here.
                pass
            if required and var not in cfgsection:
                msg = "Variable '%s.%s' not defined and it is required!"
                msg = msg % (section, var)
                self.logger.error(msg)
                raise ValueError(msg)
        finalconfig[section] = cfgsection
    self.args = args
    return finalconfig
def get_service_configuration(configfile_handler=None):
    """I set rdfrest Service default configuration options and possibly
    override them with the values extracted from a configuration file.

    :param configfile_handler: optional handler of a configuration file

    :return: Configuration object.
    """
    # When allow_no_value=True is passed, options without values return None
    # The value must be used as flags i.e
    # [rdf_database]
    # repository
    # and not :
    # repository =
    # which will return an empty string whatever 'allow_no_value' value is set
    config = SafeConfigParser(allow_no_value=True)

    # Default values, grouped by section (an empty list yields a bare
    # section).  Order matters for ConfigParser section ordering.
    defaults = [
        ("server", [
            ("host-name", "localhost"),
            ("port", "8001"),
            ("base-path", ""),
            ("force-ipv4", "false"),
            ("max-bytes", "-1"),
            ("no-cache", "false"),
            ("flash-allow", "false"),
            ("max-triples", "-1"),
            ("cors-allow-origin", ""),
            ("resource-cache", "false"),
        ]),
        ("ns_prefix", []),
        # A future specification section "httpd" or "wsgi"
        # may be needed for HttpFrontend
        ("plugins", [
            ("post_via_get", "false"),
        ]),
        # TODO : optional plugin specific configuration
        ("rdf_database", [
            ("repository", ""),
            ("force-init", "false"),
        ]),
        ("logging", [
            ("loggers", ""),
            ("console-level", "INFO"),
            # No filename implies no logging to file
            ("filename", ""),
            ("file-level", "INFO"),
            ("json-configuration-filename", "logging.json"),
        ]),
    ]
    for section, options in defaults:
        config.add_section(section)
        for option, value in options:
            config.set(section, option, value)

    # Loading from config file
    if configfile_handler is not None:
        config.readfp(configfile_handler)

    return config
def get_service_configuration(configfile_handler=None):
    """I set rdfrest Service default configuration options and possibly
    override them with the values extracted from a configuration file.

    :param configfile_handler: optional handler of a configuration file

    :return: Configuration object.
    """
    # When allow_no_value=True is passed, options without values return None
    # The value must be used as flags i.e
    # [rdf_database]
    # repository
    # and not :
    # repository =
    # which will return an empty string whatever 'allow_no_value' value is set
    config = SafeConfigParser(allow_no_value=True)

    # Defaults per section, applied in order; empty lists create the
    # section without options.
    section_defaults = [
        ('server', [
            ('host-name', 'localhost'),
            ('port', '8001'),
            ('base-path', ''),
            ('force-ipv4', 'false'),
            ('max-bytes', '-1'),
            ('no-cache', 'false'),
            ('flash-allow', 'false'),
            ('max-triples', '-1'),
            ('cors-allow-origin', ''),
            ('resource-cache', 'false'),
        ]),
        ('ns_prefix', []),
        # A future specification section "httpd" or "wsgi"
        # may be needed for HttpFrontend
        ('plugins', [
            ('post_via_get', 'false'),
        ]),
        # TODO : optional plugin specific configuration
        ('rdf_database', [
            ('repository', ''),
            ('force-init', 'false'),
        ]),
        ('logging', [
            ('loggers', ''),
            ('console-level', 'INFO'),
            # No filename implies no logging to file
            ('filename', ''),
            ('file-level', 'INFO'),
            ('json-configuration-filename', 'logging.json'),
        ]),
    ]
    for sec, opts in section_defaults:
        config.add_section(sec)
        for opt, val in opts:
            config.set(sec, opt, val)

    # Loading from config file
    if configfile_handler is not None:
        config.readfp(configfile_handler)

    return config
def read_config():
    """Return a RawConfigParser primed with DEFAULT_CONFIGURATION and
    optionally overridden by the file named in $PYCON_DEMO_CONFIG.

    :return: configparser.RawConfigParser
    """
    config = configparser.RawConfigParser(allow_no_value=True)
    # read_string replaces the readfp/StringIO combination; readfp was
    # deprecated since 3.2 and removed in Python 3.12.
    config.read_string(DEFAULT_CONFIGURATION)
    # Membership test directly on os.environ (no .keys() needed).
    if 'PYCON_DEMO_CONFIG' in os.environ:
        config.read(os.environ['PYCON_DEMO_CONFIG'])
    return config
def parse_config(config_fn):
    """Parse *config_fn* into a config object.

    Files ending in .json/.js are parsed as JSON and wrapped via
    dict2config under the "General" key; anything else is read with
    ConfigParser.

    :return: the resulting config object
    """
    ext = os.path.splitext(config_fn)[1]
    if ext in ('.json', '.js'):
        # 'with' closes the handle (both branches used to leak it).
        with open(config_fn) as fh:
            jdict = json.loads(fh.read())
        config = dict2config(jdict, "General")
    else:
        config = ConfigParser.ConfigParser()
        with open(config_fn) as fh:
            config.readfp(fh)
    return config
def collate_configs(filenames, defaults):
    """Read every file in *filenames* into a single SafeConfigParser
    seeded with *defaults*; later files override earlier ones."""
    _log.debug("Loading configuration files: %r", filenames)
    parser = SafeConfigParser(defaults)
    for name in filenames:
        with open(name, 'r') as handle:
            parser.readfp(handle)
    return parser
def _read_config():
    """Populate the module-level `config` from config.ini, then overlay
    optional local.ini overrides from the same directory."""
    global config
    config = SafeConfigParser()
    baseDir = os.path.dirname(__file__)
    config_ini = os.path.join(baseDir, 'config.ini')
    local_ini = os.path.join(baseDir, 'local.ini')
    # config.ini is mandatory; 'with' closes the handle (it used to leak).
    with open(config_ini) as fp:
        config.readfp(fp)
    # local.ini is optional: config.read() silently skips missing files.
    config.read([local_ini])
def get_service_configuration(configfile_handler=None):
    """I set rdfrest Service default configuration options and possibly
    override them with the values extracted from a configuration file.

    :param configfile_handler: optional handler of a configuration file

    :return: Configuration object.
    """
    # When allow_no_value=True is passed, options without values return None
    # The value must be used as flags i.e
    # [rdf_database]
    # repository
    # and not :
    # repository =
    # which will return an empty string whatever 'allow_no_value' value is set
    config = SafeConfigParser(allow_no_value=True)

    # Defaults per section, applied in order; an empty list creates the
    # section with no options.
    defaults = [
        ('server', [
            ('host-name', 'localhost'),
            ('port', '8001'),
            ('threads', '2'),
            ('base-path', ''),
            ('force-ipv4', 'false'),
            ('max-bytes', '-1'),
            ('flash-allow', 'false'),
            ('max-triples', '-1'),
            ('cors-allow-origin', ''),
            ('reset-connection', 'false'),
            ('send-traceback', 'false'),
        ]),
        ('ns_prefix', []),
        # A future specification section "httpd" or "wsgi"
        # may be needed for HttpFrontend
        ('plugins', [
            ('post_via_get', 'false'),
        ]),
        # TODO : optional plugin specific configuration
        ('rdf_database', [
            ('repository', ''),
            ('force-init', 'false'),
        ]),
        ('logging', [
            ('loggers', ''),
        ]),
    ]
    for section, options in defaults:
        config.add_section(section)
        for option, value in options:
            config.set(section, option, value)

    # Loading from config file
    if configfile_handler is not None:
        config.readfp(configfile_handler)

    return config
def get_logging_config(cfg_path=None):
    """Assemble the agent's logging configuration from the [Main] section
    of datadog.conf.

    :param cfg_path: optional explicit config path
    :return: dict of logging settings (log files, level, syslog options)
    """
    logging_config = {
        'log_level': None,
        'collector_log_file': '/var/log/datadog/collector.log',
        'forwarder_log_file': '/var/log/datadog/forwarder.log',
        'dogstatsd_log_file': '/var/log/datadog/dogstatsd.log',
        'pup_log_file': '/var/log/datadog/pup.log',
        'log_to_syslog': True,
        'syslog_host': None,
        'syslog_port': None,
    }
    config_path = get_config_path(cfg_path, os_name=getOS())
    config = ConfigParser.ConfigParser()
    # 'with' closes the handle (the original leaked it).
    with open(config_path) as config_file:
        config.readfp(skip_leading_wsp(config_file))
    if config.has_section('handlers') or config.has_section('loggers') or config.has_section('formatters'):
        sys.stderr.write("Python logging config is no longer supported and will be ignored.\nTo configure logging, update the logging portion of 'datadog.conf' to match:\n 'https://github.com/DataDog/dd-agent/blob/master/datadog.conf.example'.\n")
    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)
    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(config.get('Main', 'log_level'))
    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get('Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]
    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None
    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except ValueError:
            # Narrowed from a bare except: only a non-numeric port should
            # fall back to None.
            logging_config['syslog_port'] = None
    return logging_config
def __call__(self, parser, namespace, values, option_string=None):
    """argparse action: parse the INI file at *values* (falling back to
    the default location) and store the ConfigParser on the namespace."""
    location = values if values else self.default
    config = ConfigParser.ConfigParser()
    try:
        with open(location) as fp:
            config.readfp(fp)
    except (IOError, ConfigParser.Error) as e:
        raise argparse.ArgumentError(
            self, "Unable to read URL file: {}".format(e))
    setattr(namespace, self.dest, config)
def __init__(self, configfname, problem, codefun, agentclass):
    """Set up the evolutionary workbench from the [default] section of
    an INI config file.

    :param configfname: path to the main config file
    :param problem: problem instance to optimize
    :param codefun: code-generation function
    :param agentclass: agent class, partially applied with the ARN config
    """
    config = ConfigParser.ConfigParser()
    # 'with' closes the handle (the original leaked it).
    with open(configfname) as conf_file:
        config.readfp(conf_file)
    logging.config.fileConfig(config.get('default','logconf'))
    log.info('Setting up evolutionary workbench...')
    self.evolog.critical(evologhead)
    self.problem = problem
    self.codefun = codefun
    self.popsize = config.getint('default','popsize')
    self.parentpsize = config.getint('default','parentpopsize')
    self.maxiters = config.getint('default','numiters')
    self.popratio = self.popsize / self.parentpsize
    # Operator names/rates are parsed by the _initialize_ops helper.
    opnames = config.get('default','operators')
    oprates = config.get('default','oprates')
    self.opargs = config.get('default','opargs').split(',')
    self.ops_, self.oprates = _initialize_ops(opnames,oprates)
    log.debug(self.ops_)
    log.debug(self.oprates)
    # Secondary config for the ARN agents.
    arncfg = config.get('default','arnconf')
    self.arnconfig = ConfigParser.ConfigParser()
    with open(arncfg) as arn_file:
        self.arnconfig.readfp(arn_file)
    self.agentclass = partial(agentclass, config = self.arnconfig)
    self.mutrate = config.getfloat('default','mutrate')
    self.orig_mutrate = self.mutrate
    self.mutate_ = partial(bitflipmutation, mutrate = self.mutrate)
    self.improves = 0
    self.tempevals = 0
    self.adfcount = 0
    # Optional hooks, resolved by name from the __main__ module.
    self.localsearch = config.get('default','localsearch')
    if self.localsearch:
        log.info('Initializing local search holder')
        mainmod = __import__('__main__')
        self.localsearch = getattr(mainmod, self.localsearch)(5,codefun)
    self.basicadf = config.get('default','adf')
    if self.basicadf:
        log.info('Initializing multiplex adf skeleton')
        mainmod = __import__('__main__')
        self.basicadf = getattr(mainmod, self.basicadf)
    self.numevals = None
    self.population = None
    self.parents = None
    self.best = None
    self.itercount = None
def __init__(self, method):
    """Initialize with an environment name.

    :param method: one of 'test', 'production' or 'local'; any other
        value terminates the process
    :return:
    """
    self.method = method
    # Membership test replaces the chained != comparisons.
    if method not in ('test', 'production', 'local'):
        exit("please init config with test or real")
    config = ConfigParser.ConfigParser()
    # 'with' closes the handle (the original leaked it).
    with open("{0}/../conf/{1}/db.conf".format(root, method)) as conf_file:
        config.readfp(conf_file)
    self.config = config
    self.init_log_handle()
def load_config(defaults, files):
    """Build a ConfigParser seeded from the *defaults* file, overlay
    *files*, and apply the ACCOUNTING_CLIENT_DIR override used by
    buildbot."""
    parser = ConfigParser.ConfigParser()
    with open(defaults, 'r') as handle:
        parser.readfp(handle)
    parser.read(files)
    # hack to make it possible to find client dir in buildbot
    if 'ACCOUNTING_CLIENT_DIR' in os.environ and parser.has_section('accounting'):
        parser.set('accounting', 'client_dir',
                   os.environ['ACCOUNTING_CLIENT_DIR'])
    return parser
def __init__(self, configfile=None):
    """Optionally load logging and exchange API credentials from an INI
    config file.

    :param configfile: open file object for the config, or None
    """
    self.verbose = False
    self.market = "poloniex"
    self.api_key = None
    self.api_secret = None
    if configfile:
        # fileConfig re-reads the file by path; the open handle below is
        # still positioned at the start for the parser.
        logging.config.fileConfig(configfile.name)
        config = configparser.ConfigParser()
        # read_file replaces readfp (deprecated 3.2, removed in 3.12).
        config.read_file(configfile)
        exchange = config.get("DEFAULT", "exchange")
        self.api_key = config.get(exchange, "api_key")
        self.api_secret = config.get(exchange, "api_secret")
def read_conf(path=u"client.conf"):
    """Load the client configuration.

    :param path: path to the configuration file
    :return: ConfigParser
    """
    if not os.path.exists(path):
        logger.error(u"没有找到配置文件:\"client.conf\" !")
        sys.exit(2)
    parser = SafeConfigParser()
    with codecs.open(path, u"rb", encoding=u"utf8") as conf:
        parser.readfp(conf)
    return parser
def loadConfig(configFile):
    # Load the configuration file into a RawConfigParser and return it.
    with open(configFile) as f:
        sample_config = f.read()
    config = ConfigParser.RawConfigParser(allow_no_value=True)
    config.readfp(io.BytesIO(sample_config))
    # Debug dump of all sections/options; permanently disabled by the
    # hard-coded False (flip to True to print the parsed configuration).
    if (False):
        print("Parametrage du fichier de configuration :"+configFile)
        for section in config.sections():
            print("[%s]" % section)
            for options in config.options(section):
                print("\t%s = %s" % (options, config.get(section, options)))
    return config
class AppLogger:
    """ Application log """
    # Package root: path of this module with the 'libcommon' component
    # stripped out.
    working_dir = os.path.dirname(__file__).replace('libcommon','')
    # Shared singleton logger instance for every AppLogger object.
    __log_single = None
    __log_ini_file_temp = os.path.join(working_dir,'conf/log/log_config_temp.conf')
    __log_ini_file = os.path.join(working_dir,'conf/log/log_config.conf')
    # NOTE: this class body runs at import time -- it reads the template
    # config, injects the concrete log-file path into the file handler's
    # args, and writes the result out as the live config file.
    filecontent = open(__log_ini_file_temp,'rb')
    config = ConfigParser.ConfigParser()
    config.readfp(filecontent)
    filecontent.close()
    config.set("handler_file","args",(os.path.join(working_dir,'log/app.log'),'d',1,'%Y-%m-%d'))
    configContent = open(__log_ini_file,'wb')
    config.write(configContent)
    configContent.close()

    def __init__(self):
        # Lazily apply the generated logging config exactly once; failures
        # are deliberately swallowed so the app keeps running without
        # file logging.
        if not AppLogger.__log_single:
            try:
                logging.config.fileConfig(AppLogger.__log_ini_file)
            except:
                pass
            AppLogger.__log_single = logging.getLogger(None)

    def getHandler(self):
        # Return the shared root logger instance.
        return AppLogger.__log_single
def parse_cfg(cfg_file):
    # Parse an experiment config file into four pieces:
    #   test_parameters : options of [Test] (batch_size -> int, or None for
    #                     the literal string 'None')
    #   data_args       : options of [Dataset] (classes split on ',';
    #                     trainsize/testsize/no_classes -> int)
    #   flann_args      : options of [Flann] (k/checks/trees -> int)
    #   descriptors     : every other section as [name, options] with
    #                     alpha -> float and verbose -> bool
    with open(cfg_file) as cfg:
        test_parameters = dict()
        data_args = dict()
        descriptors = []
        flann_args = dict()
        config = ConfigParser.RawConfigParser()
        config.readfp(cfg)
        sections = config.sections()
        for section in sections:
            if section == 'Test':
                for name, value in config.items(section):
                    if name == 'batch_size':
                        # The literal string 'None' means "no batching".
                        if value == 'None':
                            test_parameters[name] = None
                        else:
                            test_parameters[name] = int(value)
                    else:
                        test_parameters[name] = value
            elif section == 'Dataset':
                for name, value in config.items(section):
                    if name == 'classes':
                        # comma-separated list of class names
                        data_args[name] = value.split(',')
                    elif name == 'trainsize' or name == 'testsize' or \
                            name == 'no_classes':
                        data_args[name] = int(value)
                    else:
                        data_args[name] = value
            elif section == 'Flann':
                for name, value in config.items(section):
                    if name in ['k','checks','trees']:
                        flann_args[name] = int(value)
                    else:
                        flann_args[name] = value
            else:
                # Any remaining section describes one descriptor.
                d = [section, dict()]
                for name, value in config.items(section):
                    if name=='alpha':
                        d[1][name] = float(value)
                    elif name=='verbose':
                        # Only the exact string 'True' enables verbosity.
                        d[1][name] = value=='True'
                    else:
                        d[1][name] = value
                descriptors.append(d)
        return test_parameters, data_args, descriptors, flann_args
def load_patterns(fpath):
    """Load indicator regexes from an INI file.

    Every section names an indicator type and may define a ``pattern``
    option; sections without a usable pattern are skipped.

    :param fpath: path to the INI file
    :return: {indicator_type: compiled regex}
    """
    patterns = {}
    config = ConfigParser()
    with open(fpath) as f:
        # read_file replaces readfp (deprecated 3.2, removed in 3.12).
        config.read_file(f)
    for ind_type in config.sections():
        try:
            ind_pattern = config.get(ind_type, 'pattern')
        except Exception:
            # Narrowed from a bare except: a section without a 'pattern'
            # option (NoOptionError) is simply skipped.
            continue
        if ind_pattern:
            ind_regex = re.compile(ind_pattern)
            patterns[ind_type] = ind_regex
    return patterns
def _setup_app(
    default_path='/etc/eie_config/eieldap.cfg',
    env_key='RESOURCE_LOG_CFG'
):
    """Setup application configuration
    """
    parser = SafeConfigParser()
    path = default_path
    # Guard clause: fail fast when the config file is missing.
    if not os.path.exists(path):
        error_msg = "{} does not exist".format(path)
        logging.error(error_msg)
        raise IOError(error_msg)
    with open(path) as source:
        parser.readfp(source)
    return parser
def load_server_config():
    # Search the current dir, the user's home dir and /etc/sqrgraph (in
    # that order) for server.conf; the first readable copy wins.
    # Returns a (neo4j_conf, couchbase_conf) ServerConfig pair, or raises
    # ValueError when no candidate file can be opened.
    for loc in os.curdir, os.path.expanduser("~"), "/etc/sqrgraph":
        try:
            with open(os.path.join(loc, "server.conf")) as source:
                config = ConfigParser.RawConfigParser(allow_no_value=True)
                config.readfp(source)
                # NOTE(review): _sections is a private ConfigParser API;
                # this relies on it exposing raw {section: {option: value}}
                # mappings -- fragile across Python versions.
                neo4j_conf = ServerConfig(**config._sections["neo4j"])
                log.info("Loaded neo4j configurations %r", neo4j_conf.__dict__)
                couchbase_conf = ServerConfig(**config._sections["couchbase"])
                log.info("Loaded couchbase configurations %r", couchbase_conf.__dict__)
                return neo4j_conf, couchbase_conf
        except IOError:
            # Candidate missing/unreadable: try the next location.
            pass
    raise ValueError("Connection configuration file not found")
def set_syslog_logger(self, enabled):
    """Enable or disable the syslog handler in the root-logger config.

    Rewrites self.logconf so that 'syslogHandler' appears in
    [logger_root] handlers exactly when *enabled* is true.
    """
    config = ConfigParser.ConfigParser()
    # 'with' closes the read handle (the original leaked it).
    with open(self.logconf) as conf_file:
        config.readfp(conf_file)
    handlers = config.get("logger_root", "handlers")
    handler_list = handlers.split(',')
    if enabled:
        new_list = "syslogHandler,"
    else:
        new_list = ""
    for handler in handler_list:
        if handler == "syslogHandler":
            # Drop any pre-existing entry; re-added above when enabled.
            continue
        new_list += handler + ","
    new_list = new_list.rstrip(',')
    config.set("logger_root", "handlers", new_list)
    with open(self.logconf, 'w') as configfile:
        config.write(configfile)
class Settings:
    """Environment-keyed access to the application config file."""
    # Path of the INI file; one parser shared by all Settings instances
    # (the class body runs once at import time).
    config_file_path = CONFIG_FILE_PATH
    config = configparser.ConfigParser()
    # read_file replaces readfp (deprecated 3.2, removed in Python 3.12);
    # 'with' also closes the handle, which used to leak.
    with io.open(config_file_path, encoding="utf8") as _cfg_fh:
        config.read_file(_cfg_fh)
    # Active environment (section name), overridable via $REPORT_ENV.
    env = os.getenv("REPORT_ENV", 'dev')

    def __init__(self, env=ENV):
        self.env = env

    def __getitem__(self, item):
        # settings['key'] -> value from the section named by self.env.
        return self.config.get(self.env, item)
def __parse_config_file(file = None):
    """ Parse the configuration file

    :param file: Configuration file to parse
    :type file: String

    When no file is given, a minimal default configuration (local
    MongoDB) is built instead.  The result is stored on Config._config.
    """
    config = ConfigParser.ConfigParser()
    if file is not None:
        # If config file, load Logger config
        Config._log = logging
        logging.config.fileConfig(file)
        # 'with' closes the handle (the original leaked it).
        with open(file) as conf_fp:
            config.readfp(conf_fp)
    else:
        # Set defaults
        config.add_section("app:main")
        config.set("app:main","db_uri","mongodb://localhost")
        config.set("app:main","db_name","mobyle")
    Config._config = config
def _read_config(self, config_path):
    """Read config.  Exit on invalid config.

    :param config_path: path to the INI file (~ is expanded)
    :return: dict of validated settings
    """
    self.log.debug("Reading config...")
    conf = {}
    defaults = {
        'interval': '3600',
        'timeout': '16',
        'redirects': '2',
        'ip_mode': 'random',
        'loglevel': 'WARNING',
    }
    config = SafeConfigParser(defaults=defaults)
    try:
        # 'with' guarantees the handle is closed even when a later get()
        # raises (the original leaked it on the error path).
        with open(path.expanduser(config_path), 'r') as f:
            config.readfp(f)
        conf['user'] = config.get('general', 'user')
        conf['token'] = config.get('general', 'token')
        conf['url'] = self._is_url(config.get('general', 'host_url'))
        conf['interval'] = config.getfloat('general', 'interval')
        conf['timeout'] = config.getfloat('general', 'timeout')
        conf['redirects'] = config.getint('general', 'redirects')
        conf['ip_mode'] = self._is_mode(config.get('ip_service', 'mode'))
        conf['ip_url'] = self._is_url(config.get('ip_service', 'ip_urls'))
        conf['loglevel'] = config.get('logging', 'level', fallback='WARNING')
    except (MissingSectionHeaderError, NoSectionError, NoOptionError,
            ValueError, IOError) as e:
        self.log.critical("Configuration error: %s" % e)
        exit(1)
    return conf
def main(argv=None):
    '''Step 1: Load config file, then build and run the Driver.'''
    if not argv:
        argv = sys.argv[1:]
    args = docopt(__doc__, argv, version=stix2cif_version)
    try:
        if args.get('<config>') is None:
            config_file = "stix2cif_config.cfg"
        else:
            config_file = args.get('<config>')
        cf = open(config_file, 'r')
    except IOError as err:
        # Bug fix: `print msg, sys.stderr` wrote the *repr of the stderr
        # module object* to stdout; write the message to stderr instead.
        sys.stderr.write('Cannot open configuration file: ' + str(err) + '\n')
        sys.exit(1)
    config = Stix_CIF_Config.StixCIF_ConfigParser()
    with cf:
        config.readfp(cf)
    try:
        driver = Driver(config)
    except Exception as ex:
        sys.stderr.write('Could not initialize driver: ' + str(ex) + '\n')
        sys.exit(2)
    logging.config.fileConfig(config_file, disable_existing_loggers=False)
    logger = logging.getLogger(__name__)
    logger.info('Starting Driver...')
    try:
        driver.run()
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate.
        logger.exception("Error during file processing, exiting ...")
    logger.info('Finished Driver, exiting.')
def parse_config(self, filename):
    """Parse *filename*: apply its logging config (best effort) and
    prepend a ConfigParser for it to self.configs.

    :param filename: config path; falsy values are ignored
    :return: the new ConfigParser, or None when unreadable/unparsable
    """
    if not filename:
        return
    self.path = os.path.realpath(filename)
    try:
        logging.config.fileConfig(self.path)
    except Exception:
        # Logging config is optional; ignore files without one.
        # (Narrowed from a bare except.)
        pass
    try:
        config = ConfigParser.ConfigParser()
        with open(self.path) as f:
            config.readfp(f)
        self.configs.insert(0, config)
    except Exception:
        log.debug("Cannot parse config file %s", filename)
        return None
    else:
        return self.configs[0]
def load_configuration(defaults=None, config_path=None):
    """
    Load configuration.
    """
    config = ConfigParser.RawConfigParser()

    # Defaults, if any.
    if defaults is not None:
        with contextlib.closing(stringio.StringIO(defaults)) as fp:
            config.readfp(fp)

    # Main configuration file, if any.
    if config_path is not None:
        config.read(config_path)
        logging.config.fileConfig(config_path)
    else:
        # Ensure logging is at least configured to a minimum level.
        logging.basicConfig()

    # Load in any additional config files.
    if config.has_option('include', 'files'):
        config.read(glob.glob(config.get('include', 'files')))

    return config
def get_config(parse_args=True, cfg_path=None, options=None):
    """Assemble the sd-agent configuration dict (Python 2 code).

    Starts from hard-coded defaults, bulk-imports every option of the
    [Main] section of the config file found by get_config_path(), then
    applies option-by-option validation/coercion.  Command-line *options*
    override selected file values.  Exits (status 2) when [Main] is
    missing.

    NOTE(review): written for Python 2 (`iteritems`, `except X, e`).
    NOTE(review): the open() handle passed to readfp is never closed.
    NOTE(review): no explicit return is visible in this chunk — as shown
    the function returns None; verify against the full file.
    """
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/sd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig['additional_checksd'] = '/usr/local/etc/sd-agent/checks.d/'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import: every [Main] option lands in agentConfig as a raw
        # string; the code below re-reads and coerces selected options.
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(
                config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #

        # FIXME unnecessarily complex
        if config.has_option('Main', 'sd_account'):
            agentConfig['sd_account'] = config.get('Main', 'sd_account')

        # sd_url resolution priority: forwarder > CLI > config file > default
        agentConfig['use_forwarder'] = False
        if options is not None and options.use_forwarder:
            listen_port = 17124
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['sd_url'] = "http://" + agentConfig[
                'bind_host'] + ":" + str(listen_port)
            agentConfig['use_forwarder'] = True
        elif options is not None and not options.disable_sd and options.sd_url:
            agentConfig['sd_url'] = options.sd_url
        elif config.has_option('Main', 'sd_url'):
            agentConfig['sd_url'] = config.get('Main', 'sd_url')
        else:
            # Default agent URL
            agentConfig['sd_url'] = "https://" + agentConfig[
                'sd_account'] + ".agent.serverdensity.io"
        if agentConfig['sd_url'].endswith('/'):
            agentConfig['sd_url'] = agentConfig['sd_url'][:-1]

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get(
                'Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(
                common_path, 'ServerDensity', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get(
                'Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except:
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get(
                'Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # Which agent key to use (mandatory: NoOptionError is fatal here)
        agentConfig['agent_key'] = config.get('Main', 'agent_key')

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get(
                'Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (
                use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(
                    config.get('Main', 'check_freq'))
            except Exception:
                # keep the default frequency on a bad value
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(
                config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(
                config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        # (the port is only honoured when a forward host is configured)
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get(
                'Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(
                    config.get('Main', 'statsd_forward_port'))

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            if _is_affirmative(config.get('Main', 'dogstatsd_use_ddurl')):
                agentConfig['dogstatsd_target'] = agentConfig['sd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(
                config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(
                config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(
                config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(
                config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get(
                "Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(
                config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        # Copy the whole [WMI] section verbatim, if present.
        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(
                config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(
                config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(
                config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(
                config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(
                config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(
                config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(
                config.get("Main", "gce_updated_hostname"))

    # Python 2 except syntax; any missing [Main] section is fatal.
    except ConfigParser.NoSectionError, e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)
def get_logging_config(cfg_path=None):
    """Build the sd-agent logging configuration dict.

    Seeds OS-specific log file paths (Windows common-data dir vs.
    /var/log/sd-agent), then overrides entries from the [Main] section
    of the agent config file.

    :param cfg_path: optional explicit config path, forwarded to
        get_config_path().
    :return: dict of logging settings (log_level, per-component log
        files, syslog/event-viewer switches, syslog host/port).

    NOTE(review): the open() handle passed to readfp is never closed.
    """
    system_os = get_os()
    logging_config = {
        'log_level': None,
        'log_to_event_viewer': False,
        'log_to_syslog': False,
        'syslog_host': None,
        'syslog_port': None,
    }
    if system_os == 'windows':
        logging_config['windows_collector_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'collector.log')
        logging_config['windows_forwarder_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'forwarder.log')
        logging_config['windows_dogstatsd_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'dogstatsd.log')
        logging_config['jmxfetch_log_file'] = os.path.join(
            _windows_commondata_path(), 'ServerDensity', 'logs',
            'jmxfetch.log')
    else:
        logging_config[
            'collector_log_file'] = '/var/log/sd-agent/collector.log'
        logging_config[
            'forwarder_log_file'] = '/var/log/sd-agent/forwarder.log'
        logging_config[
            'dogstatsd_log_file'] = '/var/log/sd-agent/dogstatsd.log'
        logging_config['jmxfetch_log_file'] = '/var/log/sd-agent/jmxfetch.log'
        logging_config['go-metro_log_file'] = '/var/log/sd-agent/go-metro.log'
        logging_config['log_to_syslog'] = True

    config_path = get_config_path(cfg_path, os_name=system_os)
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    # Legacy Python-logging-style sections are detected and warned about,
    # but otherwise ignored.
    if config.has_section('handlers') or config.has_section(
            'loggers') or config.has_section('formatters'):
        if system_os == 'windows':
            config_example_file = "https://github.com/serverdensity/sd-agent/blob/master/packaging/sd-agent/win32/install_files/config_win32.conf"
        else:
            config_example_file = "https://github.com/serverdensity/sd-agent/blob/master/config.cfg.example"

        sys.stderr.write(
            """Python logging config is no longer supported and will be ignored.
To configure logging, update the logging portion of 'config.cfg' to match:
 '%s'.
""" % config_example_file)

    # Raw string override for every key already present in logging_config.
    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    # Unknown level names silently map to None (dict.get default).
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(
            config.get('Main', 'log_level'))

    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get(
            'Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'log_to_event_viewer'):
        logging_config['log_to_event_viewer'] = config.get(
            'Main',
            'log_to_event_viewer').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except Exception:
            # non-numeric port falls back to None
            logging_config['syslog_port'] = None

    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get(
            'Main',
            'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False

    return logging_config
def get_config(parse_args=True, cfg_path=None, options=None):
    """Assemble the dd-agent configuration dict (Python 2 code).

    Starts from hard-coded defaults, bulk-imports every [Main] option
    from the config file found by get_config_path(), then applies
    option-by-option validation/coercion and computes the forwarder
    endpoint map.  Exits (status 2) when api_key/dd_url are missing or
    the file is malformed.

    NOTE(review): Python 2 only — `dd_urls[0]` indexes the result of
    map(), and `iteritems` is used below.
    NOTE(review): the open() handle passed to readfp is never closed.
    """
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    if Platform.is_mac():
        agentConfig['additional_checksd'] = '/opt/datadog-agent/etc/checks.d'

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import: every [Main] option lands in agentConfig as a raw
        # string; the code below re-reads and coerces selected options.
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #ap
        if not config.has_option('Main', 'api_key'):
            log.warning(u"No API key was found. Aborting.")
            sys.exit(2)

        if not config.has_option('Main', 'dd_url'):
            log.warning(u"No dd_url was found. Aborting.")
            sys.exit(2)

        # Endpoints: both options accept comma-separated lists.
        dd_urls = map(clean_dd_url, config.get('Main', 'dd_url').split(','))
        api_keys = map(lambda el: el.strip(), config.get('Main', 'api_key').split(','))

        # For collector and dogstatsd
        agentConfig['dd_url'] = dd_urls[0]
        agentConfig['api_key'] = api_keys[0]

        # Forwarder endpoints logic
        # endpoints is:
        # {
        #     'https://app.datadoghq.com': ['api_key_abc', 'api_key_def'],
        #     'https://app.example.com': ['api_key_xyz']
        # }
        endpoints = {}
        dd_urls = remove_empty(dd_urls)
        api_keys = remove_empty(api_keys)
        if len(dd_urls) == 1:
            if len(api_keys) > 0:
                endpoints[dd_urls[0]] = api_keys
        else:
            assert len(dd_urls) == len(api_keys), 'Please provide one api_key for each url'
            for i, dd_url in enumerate(dd_urls):
                endpoints[dd_url] = endpoints.get(dd_url, []) + [api_keys[i]]

        agentConfig['endpoints'] = endpoints

        # Forwarder or not forwarder
        agentConfig['use_forwarder'] = options is not None and options.use_forwarder
        if agentConfig['use_forwarder']:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://{}:{}".format(agentConfig['bind_host'], listen_port)
        # FIXME: Legacy dd_url command line switch
        elif options is not None and options.dd_url is not None:
            agentConfig['dd_url'] = options.dd_url

        # Forwarder timeout
        agentConfig['forwarder_timeout'] = 20
        if config.has_option('Main', 'forwarder_timeout'):
            agentConfig['forwarder_timeout'] = int(config.get('Main', 'forwarder_timeout'))

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get('Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(common_path, 'Datadog', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get('Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Service discovery
        if config.has_option('Main', 'service_discovery_backend'):
            try:
                additional_config = extract_agent_config(config)
                agentConfig.update(additional_config)
            except:
                log.error('Failed to load the agent configuration related to '
                          'service discovery. It will not be used.')

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get('Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get('Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(config.get('Main', 'check_freq'))
            except Exception:
                # keep the default frequency on a bad value
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        # (the port is only honoured when a forward host is configured)
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get('Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(config.get('Main', 'statsd_forward_port'))

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(config.get('Main', 'use_mount'))

        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join([log_path, config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path
        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get("Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        # Copy the whole [WMI] section verbatim, if present.
        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(config.get("Main", "utf8_decoding"))

        agentConfig["gce_updated_hostname"] = False
        if config.has_option("Main", "gce_updated_hostname"):
            agentConfig["gce_updated_hostname"] = _is_affirmative(config.get("Main", "gce_updated_hostname"))

    # Missing section / unparsable file are fatal; missing options only warn.
    except ConfigParser.NoSectionError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.ParsingError as e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)

    except ConfigParser.NoOptionError as e:
        sys.stderr.write('There are some items missing from your config file, but nothing fatal [%s]' % e)

    # Storing proxy settings in the agentConfig
    agentConfig['proxy_settings'] = get_proxy(agentConfig)
    if agentConfig.get('ca_certs', None) is None:
        agentConfig['ssl_certificate'] = get_ssl_certificate(get_os(), 'datadog-cert.pem')
    else:
        agentConfig['ssl_certificate'] = agentConfig['ca_certs']

    return agentConfig
def get_logging_config(cfg_path=None):
    """Build the dd-agent logging configuration dict.

    Seeds OS-specific log file paths (Windows common-data dir vs.
    /var/log/datadog), then overrides entries from the [Main] section of
    the agent config file.

    :param cfg_path: optional explicit config path, forwarded to
        get_config_path().
    :return: dict of logging settings (log_level, per-component log
        files, syslog/event-viewer switches, syslog host/port).

    NOTE(review): the open() handle passed to readfp is never closed.
    """
    system_os = get_os()
    logging_config = {
        'log_level': None,
        'log_to_event_viewer': False,
        'log_to_syslog': False,
        'syslog_host': None,
        'syslog_port': None,
    }
    if system_os == 'windows':
        logging_config['windows_collector_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'collector.log')
        logging_config['windows_forwarder_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'forwarder.log')
        logging_config['windows_dogstatsd_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'dogstatsd.log')
        logging_config['jmxfetch_log_file'] = os.path.join(_windows_commondata_path(), 'Datadog', 'logs', 'jmxfetch.log')
    else:
        logging_config['collector_log_file'] = '/var/log/datadog/collector.log'
        logging_config['forwarder_log_file'] = '/var/log/datadog/forwarder.log'
        logging_config['dogstatsd_log_file'] = '/var/log/datadog/dogstatsd.log'
        logging_config['jmxfetch_log_file'] = '/var/log/datadog/jmxfetch.log'
        logging_config['go-metro_log_file'] = '/var/log/datadog/go-metro.log'
        logging_config['log_to_syslog'] = True

    config_path = get_config_path(cfg_path, os_name=system_os)
    config = ConfigParser.ConfigParser()
    config.readfp(skip_leading_wsp(open(config_path)))

    # Legacy Python-logging-style sections are detected and warned about,
    # but otherwise ignored.
    if config.has_section('handlers') or config.has_section('loggers') or config.has_section('formatters'):
        if system_os == 'windows':
            config_example_file = "https://github.com/DataDog/dd-agent/blob/master/packaging/datadog-agent/win32/install_files/datadog_win32.conf"
        else:
            config_example_file = "https://github.com/DataDog/dd-agent/blob/master/datadog.conf.example"

        sys.stderr.write("""Python logging config is no longer supported and will be ignored.
To configure logging, update the logging portion of 'datadog.conf' to match:
 '%s'.
""" % config_example_file)

    # Raw string override for every key already present in logging_config.
    for option in logging_config:
        if config.has_option('Main', option):
            logging_config[option] = config.get('Main', option)

    levels = {
        'CRITICAL': logging.CRITICAL,
        'DEBUG': logging.DEBUG,
        'ERROR': logging.ERROR,
        'FATAL': logging.FATAL,
        'INFO': logging.INFO,
        'WARN': logging.WARN,
        'WARNING': logging.WARNING,
    }
    # Unknown level names silently map to None (dict.get default).
    if config.has_option('Main', 'log_level'):
        logging_config['log_level'] = levels.get(config.get('Main', 'log_level'))

    if config.has_option('Main', 'log_to_syslog'):
        logging_config['log_to_syslog'] = config.get('Main', 'log_to_syslog').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'log_to_event_viewer'):
        logging_config['log_to_event_viewer'] = config.get('Main', 'log_to_event_viewer').strip().lower() in ['yes', 'true', 1]

    if config.has_option('Main', 'syslog_host'):
        host = config.get('Main', 'syslog_host').strip()
        if host:
            logging_config['syslog_host'] = host
        else:
            logging_config['syslog_host'] = None

    if config.has_option('Main', 'syslog_port'):
        port = config.get('Main', 'syslog_port').strip()
        try:
            logging_config['syslog_port'] = int(port)
        except Exception:
            # non-numeric port falls back to None
            logging_config['syslog_port'] = None

    if config.has_option('Main', 'disable_file_logging'):
        logging_config['disable_file_logging'] = config.get('Main', 'disable_file_logging').strip().lower() in ['yes', 'true', 1]
    else:
        logging_config['disable_file_logging'] = False

    return logging_config
def get_config(parse_args=True, cfg_path=None, options=None):
    """Assemble the dd-agent configuration dict (older variant, Python 2).

    Starts from hard-coded defaults, bulk-imports every [Main] option of
    the config file found by get_config_path(), then applies
    option-by-option validation/coercion, including the legacy Pup
    endpoint handling.  Exits when no endpoint is enabled or [Main] is
    missing.

    NOTE(review): written for Python 2 (`iteritems`, `except X, e`).
    NOTE(review): the open() handle passed to readfp is never closed.
    NOTE(review): no explicit return is visible in this chunk — as shown
    the function returns None; verify against the full file.
    """
    if parse_args:
        options, _ = get_parsed_args()

    # General config
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_interval': DEFAULT_STATSD_FREQUENCY,
        'dogstatsd_normalize': 'yes',
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
    }

    # May be bumped to the Pup frequency below.
    dogstatsd_interval = DEFAULT_STATSD_FREQUENCY

    # Config handling
    try:
        # Find the right config file
        path = os.path.realpath(__file__)
        path = os.path.dirname(path)

        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        config.readfp(skip_leading_wsp(open(config_path)))

        # bulk import: every [Main] option lands in agentConfig as a raw
        # string; the code below re-reads and coerces selected options.
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        #
        # Core config
        #

        # FIXME unnecessarily complex
        if config.has_option('Main', 'use_dd'):
            agentConfig['use_dd'] = config.get('Main', 'use_dd').lower() in ("yes", "true")
        else:
            agentConfig['use_dd'] = True

        # dd_url resolution priority: forwarder > CLI > config file
        agentConfig['use_forwarder'] = False
        if options is not None and options.use_forwarder:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://localhost:" + str(listen_port)
            agentConfig['use_forwarder'] = True
        elif options is not None and not options.disable_dd and options.dd_url:
            agentConfig['dd_url'] = options.dd_url
        else:
            agentConfig['dd_url'] = config.get('Main', 'dd_url')
        if agentConfig['dd_url'].endswith('/'):
            agentConfig['dd_url'] = agentConfig['dd_url'][:-1]

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get('Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(common_path, 'Datadog', 'checks.d')

        # Whether also to send to Pup
        if config.has_option('Main', 'use_pup'):
            agentConfig['use_pup'] = config.get('Main', 'use_pup').lower() in ("yes", "true")
        else:
            agentConfig['use_pup'] = True

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get('Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        if agentConfig['use_pup'] or agentConfig['use_web_info_page']:
            if config.has_option('Main', 'pup_url'):
                agentConfig['pup_url'] = config.get('Main', 'pup_url')
            else:
                agentConfig['pup_url'] = 'http://localhost:17125'

            if config.has_option('Main', 'pup_port'):
                agentConfig['pup_port'] = int(config.get('Main', 'pup_port'))

        # Increases the frequency of statsd metrics when only sending to Pup
        if not agentConfig['use_dd'] and agentConfig['use_pup']:
            dogstatsd_interval = PUP_STATSD_FREQUENCY

        if not agentConfig['use_dd'] and not agentConfig['use_pup']:
            sys.stderr.write("Please specify at least one endpoint to send metrics to. This can be done in datadog.conf.")
            exit(2)

        # Which API key to use (mandatory: NoOptionError is fatal here)
        agentConfig['api_key'] = config.get('Main', 'api_key')

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get('Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(config.get('Main', 'check_freq'))
            except Exception:
                # keep the default frequency on a bad value
                pass

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://localhost:17123',
            'dogstatsd_interval': dogstatsd_interval,
            'dogstatsd_normalize': 'yes',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Forwarding to external statsd server
        # (the port is only honoured when a forward host is configured)
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get('Main', 'statsd_forward_host')
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(config.get('Main', 'statsd_forward_port'))

        # normalize 'yes'/'no' to boolean
        dogstatsd_defaults['dogstatsd_normalize'] = _is_affirmative(dogstatsd_defaults['dogstatsd_normalize'])

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            use_ddurl = _is_affirmative(config.get('Main', 'dogstatsd_use_ddurl'))
            if use_ddurl:
                agentConfig['dogstatsd_target'] = agentConfig['dd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(config.get('Main', 'use_mount'))

        if config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(config.get('Main', 'autorestart'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            pass

        if config.has_option('datadog', 'ddforwarder_log'):
            agentConfig['has_datadog'] = True

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join([log_path, config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path
        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get("Main", "nagios_perf_cfg")

        # Copy the whole [WMI] section verbatim, if present.
        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if config.has_option("Main", "limit_memory_consumption") and \
                config.get("Main", "limit_memory_consumption") is not None:
            agentConfig["limit_memory_consumption"] = int(config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(config.get("Main", "collect_ec2_tags"))

    # Python 2 except syntax; any missing [Main] section is fatal.
    except ConfigParser.NoSectionError, e:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)
def get_config(parse_args=True, cfg_path=None, options=None):
    """Assemble the agent configuration dict from defaults, datadog.conf and CLI options.

    Precedence, lowest to highest: the hard-coded defaults below, the
    [Main] section of the config file located by get_config_path(), then a
    few command-line overrides (--profile, --use-forwarder, --autorestart).

    :param parse_args: when True, (re-)parse the command line via
        get_parsed_args() and ignore the ``options`` argument passed in.
    :param cfg_path: optional explicit config file path, forwarded to
        get_config_path().
    :param options: pre-parsed options object (used when parse_args is False).

    Exits the process (status 2) when the config file is missing/malformed
    (ConfigParser.NoSectionError) or when no metrics endpoint is enabled.

    NOTE(review): agentConfig is fully built but never returned in this
    chunk; upstream versions of this function return agentConfig — confirm
    the tail of the function was not lost during extraction.
    """
    if parse_args:
        options, _ = get_parsed_args()

    # General config — hard-coded defaults, overridden below by the file.
    agentConfig = {
        'check_freq': DEFAULT_CHECK_FREQUENCY,
        'dogstatsd_port': 8125,
        'dogstatsd_target': 'http://localhost:17123',
        'graphite_listen_port': None,
        'hostname': None,
        'listen_port': None,
        'tags': None,
        'use_ec2_instance_id': False,  # DEPRECATED
        'version': get_version(),
        'watchdog': True,
        'additional_checksd': '/etc/dd-agent/checks.d/',
        'bind_host': get_default_bind_host(),
        'statsd_metric_namespace': None,
        'utf8_decoding': False
    }

    # Config handling
    try:
        # Find the right config file
        config_path = get_config_path(cfg_path, os_name=get_os())
        config = ConfigParser.ConfigParser()
        # FIX: close the config file handle explicitly instead of leaking
        # it (previously `open(config_path)` was never closed).
        config_file = open(config_path)
        try:
            config.readfp(skip_leading_wsp(config_file))
        finally:
            config_file.close()

        # bulk import: every [Main] option lands in agentConfig verbatim.
        for option in config.options('Main'):
            agentConfig[option] = config.get('Main', option)

        # Store developer mode setting in the agentConfig
        if config.has_option('Main', 'developer_mode'):
            agentConfig['developer_mode'] = _is_affirmative(config.get('Main', 'developer_mode'))

        # Allow an override with the --profile option
        if options is not None and options.profile:
            agentConfig['developer_mode'] = True

        #
        # Core config
        #

        # FIXME unnecessarily complex

        if config.has_option('Main', 'use_dd'):
            agentConfig['use_dd'] = config.get('Main', 'use_dd').lower() in ("yes", "true")
        else:
            agentConfig['use_dd'] = True

        # Where to ship metrics: the local forwarder, an explicit --dd_url,
        # or the dd_url from the config file (in that order of precedence).
        agentConfig['use_forwarder'] = False
        if options is not None and options.use_forwarder:
            listen_port = 17123
            if config.has_option('Main', 'listen_port'):
                listen_port = int(config.get('Main', 'listen_port'))
            agentConfig['dd_url'] = "http://" + agentConfig['bind_host'] + ":" + str(listen_port)
            agentConfig['use_forwarder'] = True
        elif options is not None and not options.disable_dd and options.dd_url:
            agentConfig['dd_url'] = options.dd_url
        else:
            agentConfig['dd_url'] = config.get('Main', 'dd_url')
        # Strip a single trailing slash so later URL joins don't double it.
        if agentConfig['dd_url'].endswith('/'):
            agentConfig['dd_url'] = agentConfig['dd_url'][:-1]

        # Extra checks.d path
        # the linux directory is set by default
        if config.has_option('Main', 'additional_checksd'):
            agentConfig['additional_checksd'] = config.get('Main', 'additional_checksd')
        elif get_os() == 'windows':
            # default windows location
            common_path = _windows_commondata_path()
            agentConfig['additional_checksd'] = os.path.join(common_path, 'Datadog', 'checks.d')

        if config.has_option('Main', 'use_dogstatsd'):
            agentConfig['use_dogstatsd'] = config.get('Main', 'use_dogstatsd').lower() in ("yes", "true")
        else:
            agentConfig['use_dogstatsd'] = True

        # Concerns only Windows
        if config.has_option('Main', 'use_web_info_page'):
            agentConfig['use_web_info_page'] = config.get('Main', 'use_web_info_page').lower() in ("yes", "true")
        else:
            agentConfig['use_web_info_page'] = True

        if not agentConfig['use_dd']:
            sys.stderr.write("Please specify at least one endpoint to send metrics to. This can be done in datadog.conf.")
            # FIX: use sys.exit() like the rest of this function; the bare
            # exit() builtin is a site-module convenience and may be absent.
            sys.exit(2)

        # Which API key to use
        agentConfig['api_key'] = config.get('Main', 'api_key')

        # local traffic only? Default to no
        agentConfig['non_local_traffic'] = False
        if config.has_option('Main', 'non_local_traffic'):
            agentConfig['non_local_traffic'] = config.get('Main', 'non_local_traffic').lower() in ("yes", "true")

        # DEPRECATED
        if config.has_option('Main', 'use_ec2_instance_id'):
            use_ec2_instance_id = config.get('Main', 'use_ec2_instance_id')
            # translate yes into True, the rest into False
            agentConfig['use_ec2_instance_id'] = (use_ec2_instance_id.lower() == 'yes')

        if config.has_option('Main', 'check_freq'):
            try:
                agentConfig['check_freq'] = int(config.get('Main', 'check_freq'))
            except Exception:
                # keep the default frequency when the value is not an int
                pass

        # Custom histogram aggregate/percentile metrics
        if config.has_option('Main', 'histogram_aggregates'):
            agentConfig['histogram_aggregates'] = get_histogram_aggregates(config.get('Main', 'histogram_aggregates'))

        if config.has_option('Main', 'histogram_percentiles'):
            agentConfig['histogram_percentiles'] = get_histogram_percentiles(config.get('Main', 'histogram_percentiles'))

        # Disable Watchdog (optionally)
        if config.has_option('Main', 'watchdog'):
            if config.get('Main', 'watchdog').lower() in ('no', 'false'):
                agentConfig['watchdog'] = False

        # Optional graphite listener
        if config.has_option('Main', 'graphite_listen_port'):
            agentConfig['graphite_listen_port'] = \
                int(config.get('Main', 'graphite_listen_port'))
        else:
            agentConfig['graphite_listen_port'] = None

        # Dogstatsd config
        dogstatsd_defaults = {
            'dogstatsd_port': 8125,
            'dogstatsd_target': 'http://' + agentConfig['bind_host'] + ':17123',
        }
        for key, value in dogstatsd_defaults.iteritems():
            if config.has_option('Main', key):
                agentConfig[key] = config.get('Main', key)
            else:
                agentConfig[key] = value

        # Create app:xxx tags based on monitored apps
        agentConfig['create_dd_check_tags'] = config.has_option('Main', 'create_dd_check_tags') and \
            _is_affirmative(config.get('Main', 'create_dd_check_tags'))

        # Forwarding to external statsd server
        if config.has_option('Main', 'statsd_forward_host'):
            agentConfig['statsd_forward_host'] = config.get('Main', 'statsd_forward_host')
            # port is only honored when a forward host is configured
            if config.has_option('Main', 'statsd_forward_port'):
                agentConfig['statsd_forward_port'] = int(config.get('Main', 'statsd_forward_port'))

        # optionally send dogstatsd data directly to the agent.
        if config.has_option('Main', 'dogstatsd_use_ddurl'):
            if _is_affirmative(config.get('Main', 'dogstatsd_use_ddurl')):
                agentConfig['dogstatsd_target'] = agentConfig['dd_url']

        # Optional config
        # FIXME not the prettiest code ever...
        if config.has_option('Main', 'use_mount'):
            agentConfig['use_mount'] = _is_affirmative(config.get('Main', 'use_mount'))

        # --autorestart on the command line wins over the config file.
        if options is not None and options.autorestart:
            agentConfig['autorestart'] = True
        elif config.has_option('Main', 'autorestart'):
            agentConfig['autorestart'] = _is_affirmative(config.get('Main', 'autorestart'))

        if config.has_option('Main', 'check_timings'):
            agentConfig['check_timings'] = _is_affirmative(config.get('Main', 'check_timings'))

        if config.has_option('Main', 'exclude_process_args'):
            agentConfig['exclude_process_args'] = _is_affirmative(config.get('Main', 'exclude_process_args'))

        try:
            filter_device_re = config.get('Main', 'device_blacklist_re')
            agentConfig['device_blacklist_re'] = re.compile(filter_device_re)
        except ConfigParser.NoOptionError:
            # no device blacklist configured
            pass

        if config.has_option('datadog', 'ddforwarder_log'):
            agentConfig['has_datadog'] = True

        # Dogstream config
        if config.has_option("Main", "dogstream_log"):
            # Older version, single log support
            log_path = config.get("Main", "dogstream_log")
            if config.has_option("Main", "dogstream_line_parser"):
                agentConfig["dogstreams"] = ':'.join([log_path, config.get("Main", "dogstream_line_parser")])
            else:
                agentConfig["dogstreams"] = log_path
        elif config.has_option("Main", "dogstreams"):
            agentConfig["dogstreams"] = config.get("Main", "dogstreams")

        if config.has_option("Main", "nagios_perf_cfg"):
            agentConfig["nagios_perf_cfg"] = config.get("Main", "nagios_perf_cfg")

        if config.has_option("Main", "use_curl_http_client"):
            agentConfig["use_curl_http_client"] = _is_affirmative(config.get("Main", "use_curl_http_client"))
        else:
            # Default to False as there are some issues with the curl client and ELB
            agentConfig["use_curl_http_client"] = False

        # Copy the whole [WMI] section verbatim (Windows metric collection).
        if config.has_section('WMI'):
            agentConfig['WMI'] = {}
            for key, value in config.items('WMI'):
                agentConfig['WMI'][key] = value

        if (config.has_option("Main", "limit_memory_consumption") and
                config.get("Main", "limit_memory_consumption") is not None):
            agentConfig["limit_memory_consumption"] = int(config.get("Main", "limit_memory_consumption"))
        else:
            agentConfig["limit_memory_consumption"] = None

        if config.has_option("Main", "skip_ssl_validation"):
            agentConfig["skip_ssl_validation"] = _is_affirmative(config.get("Main", "skip_ssl_validation"))

        agentConfig["collect_instance_metadata"] = True
        if config.has_option("Main", "collect_instance_metadata"):
            agentConfig["collect_instance_metadata"] = _is_affirmative(config.get("Main", "collect_instance_metadata"))

        agentConfig["proxy_forbid_method_switch"] = False
        if config.has_option("Main", "proxy_forbid_method_switch"):
            agentConfig["proxy_forbid_method_switch"] = _is_affirmative(config.get("Main", "proxy_forbid_method_switch"))

        agentConfig["collect_ec2_tags"] = False
        if config.has_option("Main", "collect_ec2_tags"):
            agentConfig["collect_ec2_tags"] = _is_affirmative(config.get("Main", "collect_ec2_tags"))

        agentConfig["utf8_decoding"] = False
        if config.has_option("Main", "utf8_decoding"):
            agentConfig["utf8_decoding"] = _is_affirmative(config.get("Main", "utf8_decoding"))

    # FIX: dropped the unused (and Python-3-invalid) `, e` exception binding.
    except ConfigParser.NoSectionError:
        sys.stderr.write('Config file not found or incorrectly formatted.\n')
        sys.exit(2)
__authors__ = ['"Hans Lellelid" <*****@*****.**>'] __copyright__ = "Copyright 2009 Hans Lellelid" __license__ = """Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.""" config = ConfigParser.SafeConfigParser() config.readfp(resource_stream(__name__, 'defaults.cfg')) def init_config(config_file): """ Initialize the configuration from a config file. The values in config_file will override those already loaded from the default configuration file (defaults.cfg, in current package). This method does not setup logging. @param config_file: The path to a configuration file. @type config_file: C{str} @raise ValueError: if the specified config_file could not be read.
print "Setting up the logging configuration..." logging.config.fileConfig("config/seattle_smcity.conf") logging.getLogger("boto").setLevel(logging.INFO) from smcity.analytics.worker import Worker from smcity.models.aws.aws_data import AwsDataFactory from smcity.models.test.mock_result_queue import MockResultQueue from smcity.models.test.mock_task_queue import MockTaskQueue from smcity.polygons.complex_polygon_strategy import ComplexPolygonStrategyFactory from smcity.transformers.geojson_transformer import GeoJsonTransformer print "Loading the config settings..." config = ConfigParser() configFile = open("config/seattle_smcity.conf") config.readfp(configFile) configFile.close() # Load the GeoJSON description of the police beats police_beats_geojson = geojson.loads(open("seattle_slides/seattle_police_beats.geojson").read())["features"] # Set up the components data_factory = AwsDataFactory(config) result_queue = MockResultQueue() task_queue = MockTaskQueue() worker = Worker(config, result_queue, task_queue, data_factory) transformer = GeoJsonTransformer(data_factory) geojson = None for iter in range(len(police_beats_geojson)): police_beat = ComplexPolygonStrategyFactory().from_geojson(police_beats_geojson[iter]["geometry"])
Created on Jun 13, 2016 @author: xuli ''' import logging import logging.config import psycopg2 import ConfigParser import os LOG_FILENAME = 'logging.conf' LOG_CONTENT_NAME = 'pg_log' #Read the configuration file and get the value config = ConfigParser.ConfigParser() config.readfp(open('config.ini')) host = config.get('global', 'host') db = config.get('global', 'database') usr = config.get('global', 'user') passwd = config.get('global', 'password') # def log_init(log_config_filename, logname): # logging.config.fileConfig(log_config_filename) # logger = logging.getLogger(logname) # return logger #operate database method, need return value def operate_postgre(sql): print 'Enter funtion %s...' % operate_postgre.__name__ # pgdb_logger.debug("operate_postgre enter...")
# --- CBIR tool configuration (module level; runs once at import time) ---
# NOTE(review): abspath/join/dirname and ConfigParser are used below but
# imported outside this chunk — presumably `from os.path import ...` and
# `from ConfigParser import ConfigParser` earlier in the file; confirm.
import logging.config
import sys

# Root path to CBIR
CBIR_PATH = abspath(join(dirname(__file__),'..'))

# Path to the tools directory
TOOLS_PATH = join(CBIR_PATH,'tools')

# This is used to translate the path in this directory to an absolute path.
path = lambda *a: join(TOOLS_PATH, *a)

# INI-style config file that defines the server endpoints and image path.
config_fn = path('config.conf')

config = ConfigParser()
try:
    # NOTE(review): `file()` is Python 2 only and the handle is never closed
    # explicitly (relies on GC) — consider open() with an explicit close.
    config.readfp(file(config_fn))
except IOError:
    # Abort import with a readable message when the config is missing.
    sys.exit("Error: Cannot open configuration file '%s'" % config_fn)

# PGM server endpoint settings, read from the [server.pgm] section.
PGM_SERVER = {
    'bind': config.get('server.pgm','bind'),
    'port': int(config.get('server.pgm', 'port')),  # must parse as an int
    'hostname': config.get('server.pgm', 'hostname')
}

# CBIR server endpoint settings, read from the [server.cbir] section.
CBIR_SERVER = {
    'port': int(config.get('server.cbir', 'port')),  # must parse as an int
    'hostname': config.get('server.cbir', 'hostname')
}

# Directory holding the image collection, from the [paths] section.
IMAGE_PATH = config.get('paths', 'images')
self.log_level = config.get("scheduler", "log_level") def clean(self, arg): try: delattr(self, arg) except: pass if __name__ == "__main__": config = ConfigParser() configFile = os.path.join(os.path.dirname(__file__), 'scheduler.conf') if os.path.exists(configFile): config.readfp(open(configFile)) else: print "could not find configuration file at %s" % configFile sys.exit() synchronizer = Synchronizer(config) if len(sys.argv) == 2: if 'workflows' == sys.argv[1]: synchronizer.syncWorkflows() else: print "Unknown command" sys.exit(2) sys.exit(0) else: print "usage: %s workflows" % sys.argv[0]
help= 'time in seconds how long the capture monitor should still continue capture files after all calls were finished (Default:10)' ) ext_args.add_argument( '--decode_workers_count', default=None, metavar='int', type=int, help= 'the count parallel workers to decode captures files. The minimal value is 1 (Default:2).' ) args = parser.parse_args() # read the config file config = ConfigParser.RawConfigParser() config.readfp(args.config_file) # set the defaults defaults = { "capture_files": False, "decode_files": None, "monitor_calls": False, "password": None, "username": "******", "show_interfaces": False, "logging_config": "logging.conf", "box_name": "fritz.box", "call_service_port": 1012, "login_not_required": None, "protocol": "http", "cap_folder": "captures/%(tcaps.Y-m-d/HMS)/",