def init_conf(argv):
    """Populate CONF with key-value pairs read from the configuration file
    given as argv[1]; argv[2] selects whether this is the master process."""
    parser = ConfigParser()
    parser.read(argv[1])
    section = 'ping'

    CONF['logfile'] = parser.get(section, 'logfile')
    CONF['magic_number'] = unhexlify(parser.get(section, 'magic_number'))
    # All plain-integer options share the same getter.
    for key in ('db', 'workers', 'protocol_version', 'services', 'relay',
                'socket_timeout', 'cron_delay', 'ttl', 'ipv6_prefix',
                'nodes_per_ipv6_prefix'):
        CONF[key] = parser.getint(section, key)
    CONF['debug'] = parser.getboolean(section, 'debug')
    CONF['source_address'] = parser.get(section, 'source_address')
    CONF['user_agent'] = parser.get(section, 'user_agent')
    CONF['onion'] = parser.getboolean(section, 'onion')

    # The Tor SOCKS proxy is only configured when onion routing is enabled.
    CONF['tor_proxy'] = None
    if CONF['onion']:
        parts = parser.get(section, 'tor_proxy').split(":")
        CONF['tor_proxy'] = (parts[0], int(parts[1]))

    CONF['crawl_dir'] = parser.get(section, 'crawl_dir')
    if not os.path.exists(CONF['crawl_dir']):
        os.makedirs(CONF['crawl_dir'])

    # Set to True for master process
    CONF['master'] = argv[2] == "master"
class ConfigReader(object):
    """Configuration reader tailored to the game-project config files.

    The original ``arg.gameOption`` helper was too tightly coupled and only
    usable inside ``bible``; the config file layout, however, is sound.
    This class re-implements reading on top of that layout, decoupled and
    reusable. Options are looked up in the game/region section first and
    fall back to the shared ``common`` section.

    Example::

        conf = ConfigReader(game, region)
        ip = conf.get("mobile_www_ip")
        if conf.has_option("mobile_www_port"):
            port = conf.getint("mobile_www_port")
    """

    def __init__(self, game, section, conf_dir='/app/opbin/work/bible/conf'):
        self.game = game
        self.section = section
        self.conf_file = '{}/{}.conf'.format(conf_dir.rstrip('/'), self.game)
        self.config = ConfigParser()
        self.config.read(self.conf_file)
        self.has_section = self.config.has_section(self.section)

    def has_option(self, option):
        # An option may live in the specific section or the shared one.
        return self._has_option(self.section, option) or self._has_option('common', option)

    def _has_option(self, section, option):
        return self.config.has_option(section, option)

    def _lookup(self, getter, option, args=()):
        # Shared fallback logic: specific section first, then 'common'.
        if self._has_option(self.section, option):
            return getter(self.section, option, *args)
        elif self._has_option('common', option):
            return getter('common', option, *args)
        raise Exception("Can't find option: {} in {}".format(option, self.conf_file))

    def get(self, option, raw=0, var=None):
        return self._lookup(self.config.get, option, (raw, var))

    def getint(self, option):
        return self._lookup(self.config.getint, option)

    def getfloat(self, option):
        return self._lookup(self.config.getfloat, option)

    def getboolean(self, option):
        return self._lookup(self.config.getboolean, option)
def load_config(args=None):
    """Return an argparse.Namespace populated from an INI config file.

    *args* may be None (use DEFAULT_CONFIG_FILE), a path string, or an
    existing Namespace carrying a ``config`` attribute.
    """
    if args is None or isinstance(args, basestring):
        namespace = argparse.Namespace()
        namespace.config = DEFAULT_CONFIG_FILE if args is None else args
        args = namespace

    try:
        parser = ConfigParser()
        parser.read(args.config)
        args.ip = parser.get('manager', 'ip')
        args.port = parser.getint('manager', 'port')
        args.authkey = parser.get('manager', 'authkey')
        args.batch_size = parser.getint('manager', 'batch_size')
        args.columns = parser.get('output', 'columns')
        args.uses_sqlite = parser.getboolean('worker', 'uses_sqlite')
        args.processes = parser.getint('worker', 'processes')
        args.nth = parser.getint('worker', 'nth')
        args.distance = parser.getint('worker', 'distance')
        args.dbpath = parser.get('db', 'path')
        args.zip2ws_db = os.path.join(args.dbpath, parser.get('db', 'zip2ws'))
    except Exception as e:
        # Best effort: log the problem and return whatever was populated.
        logging.error(str(e))
    return args
def init_settings(argv):
    """Populate SETTINGS with key-value pairs read from the configuration
    file given as argv[1]; argv[2] selects whether this is the master."""
    parser = ConfigParser()
    parser.read(argv[1])

    SETTINGS['logfile'] = parser.get('crawl', 'logfile')
    SETTINGS['seeders'] = parser.get('crawl', 'seeders').strip().split("\n")
    SETTINGS['workers'] = parser.getint('crawl', 'workers')
    SETTINGS['debug'] = parser.getboolean('crawl', 'debug')
    SETTINGS['user_agent'] = parser.get('crawl', 'user_agent')
    SETTINGS['socket_timeout'] = parser.getint('crawl', 'socket_timeout')
    SETTINGS['cron_delay'] = parser.getint('crawl', 'cron_delay')
    SETTINGS['max_age'] = parser.getint('crawl', 'max_age')
    SETTINGS['ipv6'] = parser.getboolean('crawl', 'ipv6')

    # Excluded nodes: the explicit addresses plus every address expanded
    # from the excluded networks.
    excluded = parser.get('crawl', 'exclude_nodes').strip().split("\n")
    for net in parser.get('crawl', 'exclude_networks').strip().split("\n"):
        excluded.extend(str(addr) for addr in ip_network(unicode(net)))
    SETTINGS['exclude_nodes'] = set(excluded)

    SETTINGS['crawl_dir'] = parser.get('crawl', 'crawl_dir')
    if not os.path.exists(SETTINGS['crawl_dir']):
        os.makedirs(SETTINGS['crawl_dir'])

    # Set to True for master process
    SETTINGS['master'] = argv[2] == "master"
def __init__(self):
    """Load signal-taper and ICCS parameters from the default config file."""
    defaults = getDefaults()
    config = ConfigParser()
    config.read(defaults)
    self.tapertype = config.get('signal', 'tapertype')
    self.taperwidth = config.getfloat('signal', 'taperwidth')
    self.maxiter = config.getint('iccs', 'maxiter')
    self.convepsi = config.getfloat('iccs', 'convepsi')
    self.convtype = config.get('iccs', 'convtype')
    self.stackwgt = config.get('iccs', 'stackwgt')
    self.srate = config.getfloat('iccs', 'srate')
    self.fstack = config.get('iccs', 'fstack')
    # SAC headers for time window, trace selection, and quality factors
    self.twhdrs = config.get('sachdrs', 'twhdrs').split()
    self.hdrsel = config.get('sachdrs', 'hdrsel')
    self.qfactors = config.get('sachdrs', 'qfactors').split()
    self.qheaders = config.get('sachdrs', 'qheaders').split()
    self.qweights = [ float(val) for val in config.get('sachdrs', 'qweights').split() ]
    # SAC headers for ICCS time picks
    self.ichdrs = config.get('sachdrs', 'ichdrs').split()
    # Choose a xcorr module and function.
    # Import explicitly instead of exec-ing a generated statement: exec on
    # config-derived strings is fragile and a code-injection risk.
    self.shift = config.getint('iccs', 'shift')
    modu = config.get('iccs', 'xcorr_modu')
    func = config.get('iccs', 'xcorr_func')
    from importlib import import_module
    self.xcorr = getattr(import_module(modu), func)
    self.xcorr_modu = modu
    self.xcorr_func = func
def __init__(self):
    """Load SAC QC headers, plot geometry/colors and taper settings from
    the default configuration file."""
    cfg = ConfigParser()
    cfg.read(getDefaults())

    def floats(sec, opt):
        # Whitespace-separated option value -> list of floats.
        return [float(v) for v in cfg.get(sec, opt).split()]

    # SAC headers for time window, trace selection, and quality factors
    self.twhdrs = cfg.get('sachdrs', 'twhdrs').split()
    self.hdrsel = cfg.get('sachdrs', 'hdrsel')
    self.qfactors = cfg.get('sachdrs', 'qfactors').split()
    self.qheaders = cfg.get('sachdrs', 'qheaders').split()
    self.qweights = floats('sachdrs', 'qweights')
    # SAC plot layout and colors
    self.figsize = floats('sacplot', 'figsize')
    self.rectseis = floats('sacplot', 'rectseis')
    self.colorwave = cfg.get('sacplot', 'colorwave')
    self.colorwavedel = cfg.get('sacplot', 'colorwavedel')
    self.colortwfill = cfg.get('sacplot', 'colortwfill')
    self.colortwsele = cfg.get('sacplot', 'colortwsele')
    self.alphatwfill = cfg.getfloat('sacplot', 'alphatwfill')
    self.alphatwsele = cfg.getfloat('sacplot', 'alphatwsele')
    self.npick = cfg.getint('sacplot', 'npick')
    self.pickcolors = cfg.get('sacplot', 'pickcolors')
    self.pickstyles = cfg.get('sacplot', 'pickstyles').split()
    self.minspan = cfg.getint('sacplot', 'minspan')
    self.srate = cfg.getfloat('sacplot', 'srate')
    # Signal taper settings
    self.tapertype = cfg.get('signal', 'tapertype')
    self.taperwidth = cfg.getfloat('signal', 'taperwidth')
def canCheckin(toCheckin):
    """
    @returns: True if destination is not locked by another user AND
    this checkin will not overwrite a newer version
    """
    chkoutInfo = ConfigParser()
    chkoutInfo.read(os.path.join(toCheckin, ".checkoutInfo"))
    chkInDest = chkoutInfo.get("Checkout", "checkedoutfrom")
    version = chkoutInfo.getint("Checkout", "version")
    lockedbyme = chkoutInfo.getboolean("Checkout", "lockedbyme")

    nodeInfo = ConfigParser()
    nodeInfo.read(os.path.join(chkInDest, ".nodeInfo"))
    locked = nodeInfo.getboolean("Versioning", "locked")
    latestVersion = nodeInfo.getint("Versioning", "latestversion")

    # TODO: raise distinct exceptions to give override options to the user.
    # Destination locked by someone else: deny.
    if not lockedbyme and locked:
        return False
    # Would overwrite a newer version: deny.
    if version < latestVersion:
        return False
    return True
def get_value(self, section, option):
    """Return the value of *option* in *section*, converted to the type
    declared in the section specification ('string'/'int'/'bool').

    Sub-sections inherit options from their parent section: when the option
    is missing from the sub-section, the parent's value is returned.
    Returns None when the option has no declared type; raises NameError for
    an unknown section.
    """
    # Work on the parent section name in case *section* is a sub-section.
    parent_section = self.get_parent_section_name(section)

    # First check that the section exists in the specification.
    # (dict.has_key() was removed in Python 3; `in` works everywhere.)
    if parent_section in self.__spec_sections:
        # Fetch the specification of the section.
        section_spec = self.__spec_sections.get(parent_section)
        option_type = None

        # Scan the section's option specs for the type of the wanted option.
        for option_spec in section_spec[2]:
            if option_spec[0] == option:
                option_type = option_spec[1]

        # Not found among the section's options?
        # Look in the sub-section specs, when the section has some.
        if self.__spec_has_subsection(parent_section):
            for sub_option_spec in section_spec[3]:
                if sub_option_spec[0] == option:
                    option_type = sub_option_spec[1]

        # Dispatch to the getter matching the declared type.
        #
        # Sub-sections inherit the options of their parent section: if the
        # option does not exist in *section*, it is probably a sub-section,
        # so fall back to looking the option up in the parent section.
        if option_type == 'string':
            try:
                return ConfigParser.get(self, section, option)
            except NoOptionError:
                return ConfigParser.get(self, parent_section, option)
        if option_type == 'int':
            try:
                return ConfigParser.getint(self, section, option)
            except NoOptionError:
                return ConfigParser.getint(self, parent_section, option)
        if option_type == 'bool':
            try:
                return ConfigParser.getboolean(self, section, option)
            except NoOptionError:
                return ConfigParser.getboolean(self, parent_section, option)
        return None
    else:
        raise NameError("Invalid section name: '%(section)s'." % \
            {'section': section})
def __init__(self):
    """Load SAC QC headers and pick/plot parameters from the defaults."""
    defaults = getDefaults()
    config = ConfigParser()
    config.read(defaults)
    # SAC headers for time window, trace selection, and quality factors
    self.twhdrs = config.get("sachdrs", "twhdrs").split()
    self.hdrsel = config.get("sachdrs", "hdrsel")
    self.qfactors = config.get("sachdrs", "qfactors").split()
    self.qheaders = config.get("sachdrs", "qheaders").split()
    self.qweights = [float(val) for val in config.get("sachdrs", "qweights").split()]
    # SAC headers for ICCS time picks
    self.ichdrs = config.get("sachdrs", "ichdrs").split()
    # plots
    self.colorwave = config.get("sacplot", "colorwave")
    self.colortwfill = config.get("sacplot", "colortwfill")
    self.colortwsele = config.get("sacplot", "colortwsele")
    # Alpha values are numeric: the sibling plot-config initializers read
    # them with getfloat(); reading them as strings here was inconsistent.
    self.alphatwfill = config.getfloat("sacplot", "alphatwfill")
    self.alphatwsele = config.getfloat("sacplot", "alphatwsele")
    self.npick = config.getint("sacplot", "npick")
    self.pickcolors = config.get("sacplot", "pickcolors")
    self.pickstyles = config.get("sacplot", "pickstyles").split()
    self.minspan = config.getint("sacplot", "minspan")
    self.fstack = config.get("iccs", "fstack")
    self.thresholds = [float(val) for val in config.get("sacplot", "thresholds").split()]
    self.colorthresholds = config.get("sacplot", "colorthresholds").split()
def __init__(self):
    """Load SAC QC parameters, plot geometry/colors, taper settings and
    threshold colors from the default configuration file."""
    config = ConfigParser()
    defaults = getDefaults()
    config.read(defaults)
    get = config.get

    def _floatlist(section, option):
        # Whitespace-separated option value -> list of floats.
        return [float(v) for v in get(section, option).split()]

    # SAC headers for time window, trace selection, and quality factors
    self.twhdrs = get("sachdrs", "twhdrs").split()
    self.hdrsel = get("sachdrs", "hdrsel")
    self.qfactors = get("sachdrs", "qfactors").split()
    self.qheaders = get("sachdrs", "qheaders").split()
    self.qweights = _floatlist("sachdrs", "qweights")
    # SAC plot layout
    self.figsize = _floatlist("sacplot", "figsize")
    self.rectseis = _floatlist("sacplot", "rectseis")
    self.colorwave = get("sacplot", "colorwave")
    self.colorwavedel = get("sacplot", "colorwavedel")
    self.colortwfill = get("sacplot", "colortwfill")
    self.colortwsele = get("sacplot", "colortwsele")
    self.alphatwfill = config.getfloat("sacplot", "alphatwfill")
    self.alphatwsele = config.getfloat("sacplot", "alphatwsele")
    self.npick = config.getint("sacplot", "npick")
    self.pickcolors = get("sacplot", "pickcolors")
    self.pickstyles = get("sacplot", "pickstyles").split()
    self.minspan = config.getint("sacplot", "minspan")
    self.srate = config.getfloat("sacplot", "srate")
    # Signal taper settings
    self.tapertype = get("signal", "tapertype")
    self.taperwidth = config.getfloat("signal", "taperwidth")
    # Quality thresholds and their display colors
    self.thresholds = _floatlist("sacplot", "thresholds")
    self.colorthresholds = get("sacplot", "colorthresholds").split()
def cloneShot(src, src_name, dst, dst_name):
    """Copy the latest animation file of shot *src* into a new version of
    shot *dst* and record checkin metadata.

    Returns False without copying when *dst* is locked, True otherwise.
    """
    src_cfg = ConfigParser()
    src_cfg.read(os.path.join(src, ".nodeInfo"))
    src_version = src_cfg.getint("Versioning", "latestversion")

    dst_cfg = ConfigParser()
    dst_cfg.read(os.path.join(dst, ".nodeInfo"))
    dst_version = dst_cfg.getint("Versioning", "latestversion")
    if dst_cfg.getboolean("Versioning", "locked"):
        return False

    src_filepath = os.path.join(src, "src", 'v' + "%03d" % src_version,
                                src_name + '_animation.mb')
    print(dst_version)

    # The clone becomes the next version of the destination shot.
    dst_path = os.path.join(dst, "src", 'v' + "%03d" % (dst_version + 1))
    os.mkdir(dst_path)
    dst_filepath = os.path.join(dst_path, dst_name + '_animation.mb')
    print('copying ' + src_filepath + ' to ' + dst_filepath)
    shutil.copyfile(src_filepath, dst_filepath)

    # Write out new animation info.
    timestamp = time.strftime("%a, %d %b %Y %I:%M:%S %p", time.localtime())
    user = getUsername()
    comment = 'copied from ' + src_name
    dst_cfg.set("Versioning", "lastcheckintime", timestamp)
    dst_cfg.set("Versioning", "lastcheckinuser", user)
    dst_cfg.set("Versioning", "latestversion", str(dst_version + 1))
    commentLine = user + ': ' + timestamp + ': ' + '"' + comment + '"'
    dst_cfg.set("Comments", 'v' + "%03d" % (dst_version + 1,), commentLine)
    _writeConfigFile(os.path.join(dst, ".nodeInfo"), dst_cfg)
    return True
def load(file):
    """Load and validate settings from *file*, returning them as a dict.

    Exits the process with status 1 when a required parameter is missing.
    """
    config = ConfigParser()
    config.read(file)

    # MongoDB settings
    host = config.get('MongoDB', 'host')
    port = config.getint('MongoDB', 'port')

    # Regex settings
    google_analytics = config.get('Regex', 'google_analytics')
    google_adsense = config.get('Regex', 'google_adsense')

    # Procspy
    threads = config.getint('Procspy', 'threads')

    # RabbitMQ
    rabbit_queue = config.get('RabbitMQ', 'queue')

    if not host or not port or not google_analytics or not google_adsense or not threads:
        # print-as-function keeps this line valid on both Python 2 and 3.
        print('Please, specify all required parameters in %s!' % file)
        sys.exit(1)

    return {
        'host': host,
        'port': port,
        'google_analytics': google_analytics,
        'google_adsense': google_adsense,
        'threads': threads,
        'queue': rabbit_queue
    }
def parseConfig(config_file):
    """Parse a gem5 Ruby/Garnet config file and return the network
    parameters as a tuple; exits with an error for non-Garnet configs."""
    config = ConfigParser()
    if not config.read(config_file):
        print("ERROR: config file '", config_file, "' not found")
        sys.exit(1)

    net = "system.ruby.network"
    if not config.has_section(net):
        print("ERROR: Ruby network not found in '", config_file)
        sys.exit(1)
    if config.get(net, "type") != "GarnetNetwork_d":
        print("ERROR: Garnet network not used in '", config_file)
        sys.exit(1)

    number_of_virtual_networks = config.getint(net, "number_of_virtual_networks")
    vcs_per_vnet = config.getint(net, "vcs_per_vnet")
    buffers_per_data_vc = config.getint(net, "buffers_per_data_vc")
    buffers_per_control_vc = config.getint(net, "buffers_per_ctrl_vc")
    # ni_flit_size is stored in bytes; report it in bits.
    ni_flit_size_bits = 8 * config.getint(net, "ni_flit_size")

    routers = config.get(net, "routers").split()
    int_links = config.get(net, "int_links").split()
    ext_links = config.get(net, "ext_links").split()

    return (config, number_of_virtual_networks, vcs_per_vnet,
            buffers_per_data_vc, buffers_per_control_vc,
            ni_flit_size_bits, routers, int_links, ext_links)
def parseOpts():
    """Parse ``-c <conf>`` from the command line and populate the module
    global ``config`` dict from the [TorCheck] section of that file."""
    global config

    argp = ArgumentParser(description='TorCheck')
    argp.add_argument('-c', nargs='?', default='torcheck.conf')
    f = vars(argp.parse_args())['c']

    # config parser with these defaults set
    confp = ConfigParser({
        # getint()/getboolean() convert these strings on read
        'reachable_port': '80',
        'listen_port': '8000',
        'listen_address': '127.0.0.1',
        'export_filename': 'torbel_export.csv',
        'status_filename': 'torbel_export.status',
        'log_file': 'torbel.log',
        'use_forwarded_header': 'False',
    })
    confp.add_section('TorCheck')
    if f:
        confp.read(f)

    config = dict()
    config['listen_port'] = confp.getint('TorCheck', 'listen_port')
    config['listen_address'] = confp.get('TorCheck', 'listen_address')
    config['reachable_port'] = confp.getint('TorCheck', 'reachable_port')
    config['export_filename'] = confp.get('TorCheck', 'export_filename')
    config['status_filename'] = confp.get('TorCheck', 'status_filename')
    config['log_file'] = confp.get('TorCheck', 'log_file')
    # BUG FIX: previously read with get(), so the default string 'False'
    # was truthy; read the flag as a real boolean.
    config['use_forwarded_header'] = confp.getboolean('TorCheck', 'use_forwarded_header')
def init(config_file="ganglia-alert.conf"):
    """Load daemon options, mail/SMS settings and Alert definitions from
    *config_file* into module globals; exits with status 2 on bad config."""
    global INTERVAL, SENDER, SERVER, RECIPIENTS, ALERTS, SMS_NUMBER, DEBUG, LOG_FILE, PID_FILE, API_KEY, API_SECRET
    config = ConfigParser()
    try:
        config.read(config_file)
        INTERVAL = config.getint("options", "interval")
        DEBUG = config.getboolean("options", "debug")
        LOG_FILE = config.get("options", "log_file")
        PID_FILE = config.get("options", "pid_file")
        SENDER = config.get("mail", "sender")
        SERVER = config.get("mail", "server")
        RECIPIENTS = [s.strip() for s in config.get("mail", "recipients").split(",")]
        SMS_NUMBER = config.get("sms", "recipient")
        API_KEY = config.get("sms", "api_key")
        API_SECRET = config.get("sms", "api_secret")
        # Every section whose name starts with 'Alert' defines one rule.
        for section in config.sections():
            if section[:5] == 'Alert':
                severity = config.get(section, 'type')
                expression = config.get(section, 'expression')
                message = config.get(section, 'message')
                action = config.get(section, 'action')
                occurences = config.getint(section, 'occurences')
                ALERTS.append(Alert(severity, expression, message, action, occurences))
        logging.basicConfig(format=FORMAT, filename=LOG_FILE, level=logging.DEBUG)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed by configuration errors.
        debug("Bad configuration"); sys.exit(2)
def run_gui(input_start_page, end_page, strict):
    """ Batch cleans the pages in text/clean."""
    config = ConfigParser()
    config.read('book.cnf')

    # Resume from the page recorded for the current mode, when available.
    option = 'last_strict_page' if strict else 'last_checked_page'
    if config.has_option('process', option):
        hold_page = config.getint('process', option)
    else:
        hold_page = input_start_page
    print(hold_page)

    # An explicit start page (non-zero) overrides the recorded one.
    start_page = hold_page if input_start_page == 0 else input_start_page

    lang = get_lang()
    lm = line_manager.LineManager(
        spell_checker.AspellSpellChecker(lang, './dict.{}.pws'.format(lang)),
        start_page,
        end_page
    )
    lm.load('text/clean')
    app = gui.main(lm, strict)
    lm.write_pages('text/clean', False)

    # Persist progress only when we got at least as far as before.
    if int(app.last_page) >= hold_page:
        config.set('process', option, app.last_page)
    with open('book.cnf', 'wb') as f:
        config.write(f)
def __init__(self, conffile):
    """Initialize the vehicle: read GPIO pin assignments from *conffile*,
    start the motor driver and distance sensor, and wire up observers.

    conffile: path to an INI file with [MotorDriver] and [DistanceSensor]
    sections holding integer pin numbers.
    """
    Observable.__init__(self)
    cp = ConfigParser()
    cp.read(conffile)
    # Only touch real GPIO hardware when not running in simulation mode.
    if not Globals.globSimulate:
        wiringpi.wiringPiSetupGpio()
    # Motor driver takes six pins: left/right/vertical, L and R each.
    self.md = MotorDriver([
        cp.getint("MotorDriver", "LEFT_L"),
        cp.getint("MotorDriver", "LEFT_R"),
        cp.getint("MotorDriver", "RIGHT_L"),
        cp.getint("MotorDriver", "RIGHT_R"),
        cp.getint("MotorDriver", "VERTICAL_L"),
        cp.getint("MotorDriver", "VERTICAL_R"),
    ])
    self.md.start()
    # Distance sensor uses a trigger/echo pin pair.
    self.ds = DistanceSensor(
        cp.getint("DistanceSensor", "TRIGGER"),
        cp.getint("DistanceSensor", "ECHO")
    )
    self.ds.start()
    # Control loop couples the motors to the distance sensor readings.
    self.acl = AltitudeControlLoop(self.md, self.ds)
    # Publish the initial component states, then subscribe for updates.
    self.update("MotorDriver", self.md)
    self.update("DistanceSensor", self.ds)
    self.update("AltitudeControlLoop", self.acl)
    self.md.subscribe(self.update)
    self.ds.subscribe(self.update)
    self.acl.subscribe(self.update)
    # Camera starts disabled.
    self.setCamera(False)
def parse_input(config_file):
    """Read simulation parameters from *config_file*, falling back to the
    built-in test defaults for any missing option, and return them as a
    flat tuple."""
    cfg = ConfigParser()
    cfg.read(config_file)

    def _opt(getter, section, option, fallback):
        # Use the configured value when present, else the test default.
        if cfg.has_option(section, option):
            return getter(section, option)
        return fallback

    # Material parameters
    lambd = _opt(cfg.getfloat, "Material Parameters", "lambd", 0.5)
    mu = _opt(cfg.getfloat, "Material Parameters", "mu", 1.0)
    rho = _opt(cfg.getfloat, "Material Parameters", "rho", 3.0)
    # Spatial domain bounds
    min_x = _opt(cfg.getfloat, "Coordinates", "min_x", 0.0)
    max_x = _opt(cfg.getfloat, "Coordinates", "max_x", 5.0)
    min_y = _opt(cfg.getfloat, "Coordinates", "min_y", 0.0)
    max_y = _opt(cfg.getfloat, "Coordinates", "max_y", 5.0)
    min_z = _opt(cfg.getfloat, "Coordinates", "min_z", 0.0)
    max_z = _opt(cfg.getfloat, "Coordinates", "max_z", 5.0)
    # Grid resolution
    N_x = _opt(cfg.getint, "Grid Points", "N_x", 100)
    N_y = _opt(cfg.getint, "Grid Points", "N_y", 100)
    N_z = _opt(cfg.getint, "Grid Points", "N_z", 100)
    # Time stepping
    t_0 = _opt(cfg.getfloat, "Time Parameters", "t_0", 0.0)
    t_f = _opt(cfg.getfloat, "Time Parameters", "t_f", 2.5)
    N_t = _opt(cfg.getint, "Time Parameters", "N_t", 100)
    # Output destination
    output = _opt(cfg.get, "Output File", "data", "output.dat")

    return lambd, mu, rho, min_x, max_x, min_y, max_y, min_z, max_z, N_x, N_y, N_z, t_0, t_f, N_t, output
def load_arguments(file_name):
    """Load arguments from arguments.conf and return them as a dict.

    Reads the [arguments] section (timing/bandwidth/paths) and the
    [prediction] section (per-media recall/precision values).
    """
    arg = {}
    cf = ConfigParser()
    cf.read(file_name)
    arg["start_time"] = cf.getint("arguments", "start_time")
    arg["time_length"] = cf.getint("arguments", "time_length")
    arg["bandwidth"] = cf.getint("arguments", "bandwidth")
    arg["out_range"] = cf.getint("arguments", "out_range")
    arg["source"] = cf.get("arguments", "source")
    arg["input"] = cf.get("arguments", "input")
    arg["output"] = cf.get("arguments", "output")
    arg["all_recall"] = cf.getfloat("prediction", "all_recall")
    arg["all_precision"] = cf.getfloat("prediction", "all_precision")
    arg["text_recall"] = cf.getfloat("prediction", "text_recall")
    arg["text_precision"] = cf.getfloat("prediction", "text_precision")
    arg["image_recall"] = cf.getfloat("prediction", "image_recall")
    arg["image_precision"] = cf.getfloat("prediction", "image_precision")
    arg["app_recall"] = cf.getfloat("prediction", "app_recall")
    # BUG FIX: app_precision previously read "video_precision" and
    # video_precision previously read "all_precision" (copy-paste errors).
    arg["app_precision"] = cf.getfloat("prediction", "app_precision")
    arg["video_recall"] = cf.getfloat("prediction", "video_recall")
    arg["video_precision"] = cf.getfloat("prediction", "video_precision")
    arg["audio_recall"] = cf.getfloat("prediction", "audio_recall")
    arg["audio_precision"] = cf.getfloat("prediction", "audio_precision")
    arg["other_recall"] = cf.getfloat("prediction", "other_recall")
    arg["other_precision"] = cf.getfloat("prediction", "other_precision")
    return arg
def parseConfigurationFile(self, configFile):
    """ Parse the configuration file to get base model parameters """
    # Fallback values, used whenever the file does not override them.
    defaultParams = {
        # CUDA kernels are defined externally in a .cu file
        "cu_dir": os.path.join("pyhawkes", "cuda", "cpp"),
        "cu_file": "process_id_kernels.cu",
        "thin": 1,
        # K is now the number of meta processes.
        "K": 1,
    }

    # Create a config parser seeded with the defaults and read the file.
    cfgParser = ConfigParser(defaultParams)
    cfgParser.read(configFile)

    self.params = {
        "cu_dir": cfgParser.get("proc_id_model", "cu_dir"),
        "cu_file": cfgParser.get("proc_id_model", "cu_file"),
        "thin": cfgParser.getint("proc_id_model", "thin"),
        "blockSz": cfgParser.getint("cuda", "blockSz"),
        # Parameter for the meta process model
        "K": cfgParser.getint("proc_id_model", "K"),
    }
def canCheckin(filePath):
    """
    @returns: True if destination is not locked by another user AND
    this checkin will not overwrite a newer version
    """
    toCheckin = os.path.join(getUserCheckoutDir(),
                             os.path.basename(os.path.dirname(filePath)))

    chkoutInfo = ConfigParser()
    chkoutInfo.read(os.path.join(toCheckin, ".checkoutInfo"))
    chkInDest = chkoutInfo.get("Checkout", "checkedoutfrom")
    version = chkoutInfo.getint("Checkout", "version")
    # currently we call it "lockedbyme"... but it's true for everyone, no
    # matter what. Not a particularly good name.
    lockedbyme = chkoutInfo.getboolean("Checkout", "lockedbyme")

    nodeInfo = ConfigParser()
    nodeInfo.read(os.path.join(chkInDest, ".nodeInfo"))
    locked = nodeInfo.getboolean("Versioning", "locked")  # destination lock flag
    latestVersion = nodeInfo.getint("Versioning", "latestversion")

    # Destination locked by someone else: deny.
    if not lockedbyme and locked:
        return False
    # Would overwrite a newer version: deny.
    if version < latestVersion:
        return False
    return True
def read(self, f):
    '''Read the settings from the given file handle.

    Every option is optional: a setting is only overwritten when the file
    actually provides it.
    '''
    cfg = ConfigParser()
    # readfp() was removed in Python 3.12; prefer read_file() when the
    # running interpreter provides it, falling back for old versions.
    (getattr(cfg, 'read_file', None) or cfg.readfp)(f)

    netSection = 'Network'
    if cfg.has_section(netSection):
        if cfg.has_option(netSection, 'defaultIpAddress'):
            self.defaultIpAddress = cfg.get(netSection, 'defaultIpAddress')
        if cfg.has_option(netSection, 'defaultPort'):
            self.defaultPort = cfg.getint(netSection, 'defaultPort')
        if cfg.has_option(netSection, 'ephemeralPortsFrom'):
            self.ephemeralPorts[0] = cfg.getint(netSection, 'ephemeralPortsFrom')
        if cfg.has_option(netSection, 'ephemeralPortsTo'):
            self.ephemeralPorts[1] = cfg.getint(netSection, 'ephemeralPortsTo')

    tftpSection = 'TFTP'
    if cfg.has_section(tftpSection):
        if cfg.has_option(tftpSection, 'timeout'):
            self.tftpTimeout = cfg.getfloat(tftpSection, 'timeout')
        if cfg.has_option(tftpSection, 'retries'):
            self.tftpRetries = cfg.getint(tftpSection, 'retries')

    serverSection = 'Server'
    if cfg.has_section(serverSection):
        if cfg.has_option(serverSection, 'defaultDirectory'):
            self.defaultDirectory = cfg.get(serverSection, 'defaultDirectory')
        if cfg.has_option(serverSection, 'saveLastUsed'):
            self.saveLastUsed = cfg.getboolean(serverSection, 'saveLastUsed')
def readIni(nb):
    """Read simulation parameters for run *nb* from input<NN>.ini into
    module globals and return the ini filename."""
    global K, N, cut, gui, distrWE, distrNS, vehphWEA, vehphNSA, maxSumFlow, tlType, intergreenLength, GSum
    global phaseMinWE, phaseMaxWE, phaseMinNS, phaseMaxNS, maxGap, detPos
    from ast import literal_eval  # safe replacement for eval() on config text

    filename = 'input' + str(nb).zfill(2) + '.ini'
    ini = ConfigParser()
    ini.read(filename)

    K = ini.getint("general", "K")
    N = ini.getint("general", "N")
    cut = ini.getboolean("general", "cut")
    gui = ini.getboolean("general", "gui")

    distrWE = ini.get("demand", "distrWE")
    distrNS = ini.get("demand", "distrNS")
    # List-valued options are stored as Python literals; parse them with
    # literal_eval so a malformed/malicious ini cannot execute code.
    vehphWEA = literal_eval(ini.get("demand", "vehphWEA"))
    vehphNSA = literal_eval(ini.get("demand", "vehphNSA"))
    maxSumFlow = ini.getint("demand", "maxSumFlow")

    tlType = ini.get("TL", "tlType")
    intergreenLength = ini.getint("TL", "intergreenLength")
    GSum = ini.getfloat("TL", "GSum")
    [phaseMinWE, phaseMaxWE] = literal_eval(ini.get("TL", "phaseMinMaxWE"))
    [phaseMinNS, phaseMaxNS] = literal_eval(ini.get("TL", "phaseMinMaxNS"))
    maxGap = ini.getfloat("TL", "maxGap")
    detPos = ini.getfloat("TL", "detPos")
    return filename
def main():
    """Bind the harvest HTTP(S) service sockets, fork worker instances and
    start the Tornado IOLoop (never returns)."""
    script_path = os.path.dirname(os.path.realpath(__file__))
    config_path = os.path.join(script_path, 'etc/harvest.cfg')
    config = ConfigParser()
    config.read(config_path)

    # Use typed getters: ports/instance counts are ints, and HTTPServer's
    # no_keep_alive expects a real boolean (any non-empty string such as
    # 'False' would be truthy).
    sockets = bind_sockets(config.getint('server', 'port'),
                           config.get('server', 'address'))

    fork_processes(config.getint('server', 'instances'))

    datastore = DataStore(config.get('datastore', 'host'),
                          config.getint('datastore', 'port'),
                          config.get('datastore', 'username'),
                          config.get('datastore', 'password'),
                          config.get('datastore', 'database'))

    app = Application([(r"/rpc/store", Handler,
                        {'datastore': datastore,
                         'api_key': config.get('server', 'api_key')})])

    server = HTTPServer(app,
                        no_keep_alive=config.getboolean('server', 'no_keep_alive'),
                        ssl_options={
                            'certfile': config.get('server', 'certfile'),
                            'keyfile': config.get('server', 'keyfile')})
    server.add_sockets(sockets)
    IOLoop.instance().start()
def parse_config(config_file):
    """Parse *config_file*, set up logging, and return the runtime
    configuration dict; exits when the file does not exist."""
    if not os.path.isfile(config_file):
        sys.exit("Could not find configuration file: {0}".format(config_file))

    parser = ConfigParser()
    parser.read(config_file)

    loggly_token = None
    log_file = parser.get('file_log', 'file') if parser.getboolean('file_log', 'enabled') else None
    # NOTE(review): loggly_token is always None at this point; the token
    # below goes only into the returned dict — confirm whether do_logging
    # should receive it instead.
    do_logging(log_file, loggly_token)

    config = {}
    if parser.getboolean('loggly_log', 'enabled'):
        config['loggly_token'] = parser.get('loggly_log', 'token')

    config['mongo_db'] = parser.get('mongodb', 'database')
    # HPFriends feed subscription settings
    config['hpf_feeds'] = parser.get('hpfriends', 'channels').split(',')
    config['hpf_ident'] = parser.get('hpfriends', 'ident')
    config['hpf_secret'] = parser.get('hpfriends', 'secret')
    config['hpf_port'] = parser.getint('hpfriends', 'port')
    config['hpf_host'] = parser.get('hpfriends', 'host')
    # Web API endpoint
    config['webapi_port'] = parser.getint('webapi', 'port')
    config['webapi_host'] = parser.get('webapi', 'host')

    return config
def __init__(self):
    """Load signal-taper and ICCS parameters from the default config file."""
    defaults = getDefaults()
    config = ConfigParser()
    config.read(defaults)
    self.tapertype = config.get("signal", "tapertype")
    self.taperwidth = config.getfloat("signal", "taperwidth")
    self.maxiter = config.getint("iccs", "maxiter")
    self.convepsi = config.getfloat("iccs", "convepsi")
    self.convtype = config.get("iccs", "convtype")
    self.stackwgt = config.get("iccs", "stackwgt")
    self.srate = config.getfloat("iccs", "srate")
    self.fstack = config.get("iccs", "fstack")
    # SAC headers for time window, trace selection, and quality factors
    self.twhdrs = config.get("sachdrs", "twhdrs").split()
    self.hdrsel = config.get("sachdrs", "hdrsel")
    self.qfactors = config.get("sachdrs", "qfactors").split()
    self.qheaders = config.get("sachdrs", "qheaders").split()
    self.qweights = [float(val) for val in config.get("sachdrs", "qweights").split()]
    # SAC headers for ICCS time picks
    self.ichdrs = config.get("sachdrs", "ichdrs").split()
    # Choose a xcorr module and function.
    # Import explicitly instead of exec-ing a generated statement: exec on
    # config-derived strings is fragile and a code-injection risk.
    self.shift = config.getint("iccs", "shift")
    modu = config.get("iccs", "xcorr_modu")
    func = config.get("iccs", "xcorr_func")
    from importlib import import_module
    self.xcorr = getattr(import_module(modu), func)
    self.xcorr_modu = modu
    self.xcorr_func = func
def load_config(self):
    """ Loads the vPoller Worker Manager configuration settings """
    logger.debug('Loading config file %s', self.config_file)

    parser = ConfigParser(self.config_defaults)
    parser.read(self.config_file)

    # Plain string options from the [worker] section.
    for key in ('mgmt', 'db', 'proxy', 'helpers', 'tasks'):
        self.config[key] = parser.get('worker', key)
    # Cache settings from the [cache] section.
    self.config['cache_enabled'] = parser.getboolean('cache', 'enabled')
    for key in ('maxsize', 'ttl', 'housekeeping'):
        self.config['cache_' + key] = parser.getint('cache', key)

    # Comma-separated option values become real lists; empty values
    # are left untouched (falsy).
    for key in ('helpers', 'tasks'):
        if self.config[key]:
            self.config[key] = self.config[key].split(',')

    logger.debug(
        'Worker Manager configuration: %s',
        self.config
    )
def __init__(self, hosts, topo_list=None, config_file=None): """Create a new Hadoop cluster with the given hosts and topology. Args: hosts (list of Host): The hosts to be assigned a topology. topo_list (list of str, optional): The racks to be assigned to each host. len(hosts) should be equal to len(topo_list). configFile (str, optional): The path of the config file to be used. """ # Load cluster properties config = ConfigParser(self.defaults) config.add_section("cluster") config.add_section("local") if config_file: config.readfp(open(config_file)) self.base_dir = config.get("cluster", "hadoop_base_dir") self.conf_dir = config.get("cluster", "hadoop_conf_dir") self.logs_dir = config.get("cluster", "hadoop_logs_dir") self.hadoop_temp_dir = config.get("cluster", "hadoop_temp_dir") self.hdfs_port = config.getint("cluster", "hdfs_port") self.mapred_port = config.getint("cluster", "mapred_port") self.local_base_conf_dir = config.get("local", "local_base_conf_dir") self.bin_dir = self.base_dir + "/bin" self.sbin_dir = self.base_dir + "/bin" # Configure master and slaves self.hosts = hosts self.master = hosts[0] # Create topology self.topology = HadoopTopology(hosts, topo_list) # Store cluster information self.host_clusters = {} for h in self.hosts: g5k_cluster = get_host_cluster(h) if g5k_cluster in self.host_clusters: self.host_clusters[g5k_cluster].append(h) else: self.host_clusters[g5k_cluster] = [h] # Create a string to display the topology t = {v: [] for v in self.topology.topology.values()} for key, value in self.topology.topology.iteritems(): t[value].append(key.address) log_topo = ', '.join([style.user2(k) + ': ' + ' '.join(map(lambda x: style.host(x.split('.')[0]), v)) for k, v in t.iteritems()]) logger.info("Hadoop cluster created with master %s, hosts %s and topology %s", style.host(self.master.address), ' '.join([style.host(h.address.split('.')[0]) for h in self.hosts]), log_topo)
def create_config(config_path):
    """Build the watcher configuration dict from the bundled defaults
    overlaid with the user-supplied config file."""
    parser = ConfigParser()
    parser.read(DEFAULT_CONFIG)   # baseline defaults first ...
    parser.read(config_path)      # ... then user overrides

    # key -> (getter name, section, option)
    plan = (
        ('docheck', 'getboolean', 'Edit Watcher', 'ENABLE'),
        ('db_extra_dbname', 'get', 'Edit Watcher', 'EXTRA_PORTAL_DB_NAME'),
        ('db_extra_table', 'get', 'Edit Watcher', 'EXTRA_PORTAL_TABLE'),
        ('webhook', 'get', 'Edit Watcher', 'WEBHOOK_URL'),
        ('embed_image', 'get', 'Edit Watcher', 'IMAGE'),
        ('deleted_maxcount', 'getint', 'Edit Watcher', 'DELETED_LIMIT'),
        ('deleted_maxtime', 'get', 'Edit Watcher', 'DELETED_TIMESPAN'),
        ('language', 'get', 'Config', 'LANGUAGE'),
        ('bbox', 'get', 'Config', 'BBOX'),
        ('db_scan_schema', 'get', 'DB', 'SCANNER_DB_SCHEMA'),
        ('db_portal_schema', 'get', 'DB', 'PORTAL_DB_SCHEMA'),
        ('db_host', 'get', 'DB', 'HOST'),
        ('db_port', 'getint', 'DB', 'PORT'),
        ('db_user', 'get', 'DB', 'USER'),
        ('db_pass', 'get', 'DB', 'PASSWORD'),
        ('db_portal_dbname', 'get', 'DB', 'PORTAL_DB_NAME'),
    )
    config = {}
    for key, getter, section, option in plan:
        config[key] = getattr(parser, getter)(section, option)

    # The bounding box is stored as a comma-separated string.
    config['bbox'] = list(config['bbox'].split(','))
    return config
def __init__(self, configfile):
    """
    PyWii configuration initialization.

    Loads general daemon options, the wiimote button->action keymap
    and the accelerometer calibration/actions from ``configfile``.
    """
    CFG = ConfigParser()
    CFG.read(configfile)

    # General daemon options.
    self.DEBUG = CFG.getboolean("main", "DEBUG")
    self.DEBUG_LEVEL = CFG.getint("main", "DEBUG_LEVEL")
    self.SLEEP_DURATION = CFG.getint("main", "SLEEP_DURATION")

    self.DISCOVER_DURATION = CFG.getint("main", "DISCOVER_DURATION")
    self.SOCK_TIMEOUT_DURATION = CFG.getfloat("main", "SOCK_TIMEOUT_DURATION")

    self.DAEMONIZE = CFG.getboolean("main", "DAEMONIZE")
    self.ON_EXIT_HOOK = CFG.get("main", "ON_EXIT_HOOK")

    # wiimote buttons map is at http://wiibrew.org/wiki/Wiimote#Core_Buttons
    # temp hacks: hard-wired actions for HOME/1/2.
    BUTTON_HOME = "quit"
    BUTTON_1 = "accel"
    BUTTON_2 = "status"

    # BUG FIX: this used to be `self.keymap = range(0, 8)`, which only
    # supports item assignment on Python 2 (where range returns a list);
    # on Python 3 it raises TypeError.  Every slot is overwritten below,
    # so plain placeholders are equivalent.
    self.keymap = [None] * 8
    self.keymap[0] = [CFG.get("button_action", "BUTTON_ACTION_LEFT"), BUTTON_2]
    self.keymap[1] = [CFG.get("button_action", "BUTTON_ACTION_RIGHT"), BUTTON_1]
    self.keymap[2] = [CFG.get("button_action", "BUTTON_ACTION_DOWN"),
                      CFG.get("button_action", "BUTTON_ACTION_B")]
    self.keymap[3] = [CFG.get("button_action", "BUTTON_ACTION_UP"),
                      CFG.get("button_action", "BUTTON_ACTION_A")]
    self.keymap[4] = [CFG.get("button_action", "BUTTON_ACTION_PLUS"),
                      CFG.get("button_action", "BUTTON_ACTION_MINUS")]
    self.keymap[5] = [0, 0]
    self.keymap[6] = [0, 0]
    self.keymap[7] = [0, BUTTON_HOME]

    # accelerometer configuration
    self.ACCELEROMETER_ZERO = CFG.getint("accel_config", "ACCELEROMETER_ZERO")
    self.ACCELEROMETER_PRECISION = CFG.getint("accel_config",
                                              "ACCELEROMETER_PRECISION")
    # Action to trigger for each accelerometer direction.
    self.ACCEL_ACTION = {
        "left": CFG.get("accel_action", "ACCEL_X_LEFT_ACTION"),
        "right": CFG.get("accel_action", "ACCEL_X_RIGHT_ACTION"),
        "up": CFG.get("accel_action", "ACCEL_Y_UP_ACTION"),
        "down": CFG.get("accel_action", "ACCEL_Y_DOWN_ACTION"),
        "front": CFG.get("accel_action", "ACCEL_Z_FRONT_ACTION"),
        "back": CFG.get("accel_action", "ACCEL_Z_BACK_ACTION"),
    }
def __init__(self):
    """Read the shutdown-button settings and initialize press state."""
    cfg = ConfigParser()
    cfg.read('/usr/local/pi-shutdown-listener/etc/config.cfg')

    # GPIO pin the button is wired to, and the press timeout.
    self.button_pin = cfg.getint('general', 'pin')
    self.button_timeout = cfg.getint('general', 'timeout')

    # Debounce / timer bookkeeping.
    self.time_stamp = time.time()
    self.timer = None
def migrate(filename):
    """Migrate an old INI-style configuration file into GConf.

    Every known option present in the file is copied (with its proper
    type) under the corresponding GConf path; the INI file is deleted
    once migration is complete.
    """
    gconf_client = gconf.client_get_default()
    old_config = ConfigParser()
    old_config.read(filename)

    # [Alarm] boolean options.
    alarm_values_bool = ('show_startup_notification', 'use_alert_dialog',
                         'show_before_alarm', 'show_pay_notification',
                         'show_alarm', 'show_due_alarm')
    for name in alarm_values_bool:
        if old_config.has_option("Alarm", name):
            value = old_config.getboolean("Alarm", name)
            gconf_path = GCONF_ALARM_PATH + name
            gconf_client.set_bool(gconf_path, value)

    # [Alarm] integer options.
    alarm_values_int = ('interval', 'notification_days_limit',
                        'show_alarm_before_days')
    for name in alarm_values_int:
        if old_config.has_option("Alarm", name):
            value = old_config.getint("Alarm", name)
            gconf_path = GCONF_ALARM_PATH + name
            gconf_client.set_int(gconf_path, value)

    # [GUI] integer options (view options and window geometry).
    gui_values_int = ('due_date', 'show_paid_bills', 'width', 'height',
                      'x', 'y')
    for name in gui_values_int:
        if old_config.has_option("GUI", name):
            value = old_config.getint("GUI", name)
            gconf_path = GCONF_GUI_PATH + name
            gconf_client.set_int(gconf_path, value)

    # [GUI] boolean options.
    gui_values_bool = ('show_menubar', 'show_toolbar')
    for name in gui_values_bool:
        if old_config.has_option("GUI", name):
            value = old_config.getboolean("GUI", name)
            gconf_path = GCONF_GUI_PATH + name
            gconf_client.set_bool(gconf_path, value)

    # [General] one-off options.
    if old_config.has_option("General", "delay"):
        value = old_config.getint("General", "delay")
        gconf_path = GCONF_PATH + "delay"
        gconf_client.set_int(gconf_path, value)
    if old_config.has_option("General", "start_in_tray"):
        value = old_config.getboolean("General", "start_in_tray")
        gconf_path = GCONF_PATH + "start_in_tray"
        gconf_client.set_bool(gconf_path, value)

    # show_alarm_at_time is kept as a raw string value.
    if old_config.has_option("Alarm", "show_alarm_at_time"):
        value = old_config.get("Alarm", "show_alarm_at_time")
        gconf_path = GCONF_ALARM_PATH + "show_alarm_at_time"
        gconf_client.set_string(gconf_path, value)

    # The old file is no longer needed once migrated.
    os.remove(filename)
def main():
    """Generate Virtuoso/supervisord ini files from their .tmpl templates.

    Usage: ``script CONFIG_URI``.  Locates the Virtuoso installation via
    the VIRTUOSO_ROOT environment variable, derives the relevant
    directories, substitutes the template variables and creates the
    ``var/{log,run}`` directories.  Must be run from the assembl root.
    """
    if len(sys.argv) < 2:
        sys.stderr.write('Usage: %s CONFIG_URI\n' % sys.argv[0])
        sys.exit(1)
    config_uri = sys.argv.pop(1)
    config = ConfigParser()
    config.read(config_uri)

    vroot = getenv('VIRTUOSO_ROOT')
    assert vroot, 'Please define the VIRTUOSO_ROOT environment variable'
    assert exists(vroot), "VIRTUOSO_ROOT directory does not exist"
    assert exists(join(vroot, 'bin', 'virtuoso-t')),\
        "VIRTUOSO_ROOT directory does not contain bin/virtuoso-t"
    assert exists('var/db/virtuoso.ini.tmpl'),\
        "Please run this script from the assembl root."

    # Prefer <vroot>/var, falling back to the system /var.
    vroot_var = join(vroot, 'var')
    if not exists(vroot_var):
        vroot_var = '/var'
    vroot_lib = join(vroot, 'lib')
    assert exists(vroot_lib)
    # Some packagings keep the ODBC drivers under lib/odbc.
    if not exists(join(vroot_lib, 'virtodbcu.so'))\
            and exists(join(vroot_lib, 'odbc', 'virtodbcu.so')):
        vroot_lib = join(vroot_lib, 'odbc')

    # The share directory name varies between packagings; pick the
    # unique entry that contains a 'vad' subdirectory.
    vname = 'virtuoso'
    if not exists(join(vroot, 'share', vname)):
        names = listdir(join(vroot, 'share'))
        names = [n for n in names if exists(join(vroot, 'share', n, 'vad'))]
        assert len(names) == 1, "Cannot identify the vad directory"
        vname = names[0]
    assert exists(join(vroot_var, 'lib', vname, 'vsp')),\
        "Cannot identify the VSP directory"
    assert exists(join(vroot, 'lib', vname, 'hosting')),\
        "Cannot identify the Virtuoso hosting directory"

    # Values substituted into the *.tmpl files below via %-formatting.
    vars = {
        'VIRTUOSO_SERVER_PORT': config.getint('virtuoso', 'http_port'),
        'VIRTUOSO_PORT': config.getint('virtuoso', 'port'),
        'VIRTUOSO_ROOT': vroot,
        'VIRTUOSO_ROOT_VAR': vroot_var,
        'VIRTUOSO_ROOT_LIB': vroot_lib,
        'VIRTUOSO_SUBDIR_NAME': vname,
        'CELERY_BROKER': config.get('app:main', 'celery.broker'),
        'here': dirname(abspath('supervisord.conf')),
        'CONFIG_FILE': config_uri
    }
    for fname in ('var/db/virtuoso.ini', 'odbc.ini', 'supervisord.conf',):
        tmpl = open(fname+'.tmpl').read()
        inifile = open(fname, 'w')
        inifile.write(tmpl % vars)
        inifile.close()

    # Make sure the runtime directories exist.
    if not exists('var'):
        mkdir('var')
    if not exists('var/log'):
        mkdir('var/log')
    if not exists('var/run'):
        mkdir('var/run')
def checkin(asset, comment):
    """
    Checks a folder back in as the newest version.

    Copies the user's checked-out working copy back to its source node
    as version N+1, updates the node's versioning metadata, releases
    the lock, optionally purges old versions and removes the working
    copy.  Returns the destination node path.

    @precondition: toCheckin is a valid path
    @precondition: canCheckin() == True OR all conflicts have been resolved
    """
    print "Checking in asset ", asset
    # First, record the checkin comment on the checked-out copy.
    assetToCheckIn = os.path.join(getUserCheckoutDir(),
                                  os.path.basename(os.path.dirname(asset)))
    setComment(assetToCheckIn, comment)

    # Where was this asset checked out from, and do we hold the lock?
    chkoutInfo = ConfigParser()
    chkoutInfo.read(os.path.join(assetToCheckIn, ".checkoutInfo"))
    chkInDest = chkoutInfo.get("Checkout", "checkedoutfrom")
    # NOTE(review): lockedbyme and locked are read but never used below.
    lockedbyme = chkoutInfo.getboolean("Checkout", "lockedbyme")

    # Versioning state of the destination node.
    nodeInfo = ConfigParser()
    nodeInfo.read(os.path.join(chkInDest, ".nodeInfo"))
    locked = nodeInfo.getboolean("Versioning", "locked")
    toKeep = nodeInfo.getint("Versioning", "Versionstokeep")
    newVersion = nodeInfo.getint("Versioning", "latestversion") + 1
    newVersionPath = os.path.join(chkInDest, "src",
                                  "v" + ("%03d" % newVersion))

    if not canCheckin(asset):
        print "Can not overwrite locked folder."
        raise Exception("Can not overwrite locked folder.")

    # Checkin: copy the working folder in as the new version ...
    shutil.copytree(assetToCheckIn, newVersionPath)
    # ... and fix permissions for the new version asset so that everyone
    # can access it.
    os.system('chmod 774 -R ' + newVersionPath)

    # Update the node's versioning metadata and release the lock.
    timestamp = time.strftime("%a, %d %b %Y %I:%M:%S %p", time.localtime())
    nodeInfo.set("Versioning", "lastcheckintime", timestamp)
    nodeInfo.set("Versioning", "lastcheckinuser", getUsername())
    nodeInfo.set("Versioning", "latestversion", str(newVersion))
    nodeInfo.set("Versioning", "locked", "False")
    amu._writeConfigFile(os.path.join(chkInDest, ".nodeInfo"), nodeInfo)

    # Prune old versions when a retention limit is configured.
    if toKeep > 0:
        amu.purge(os.path.join(chkInDest, "src"), nodeInfo,
                  newVersion - toKeep)
        amu._writeConfigFile(os.path.join(chkInDest, ".nodeInfo"), nodeInfo)

    # Clean up the working copy; the new version must not carry the
    # checkout marker file.
    shutil.rmtree(assetToCheckIn)
    os.remove(os.path.join(newVersionPath, ".checkoutInfo"))
    return chkInDest
def __init__(self, cfg_file, template):
    """
    Load a configuration file and validate it against a template.

    @param cfg_file: file path of the configuration file.
    @param template: dict mapping section name to a list of
        (key_name, key_type, key_def_value) tuples.
    @raise IrmaConfigurationError: on a malformed template, a missing
        required key, or a value of the wrong type.
    """
    config = ConfigParser()
    config.read(cfg_file)

    # First pass: load every option found in the file, guessing its
    # type (boolean first, then int, then raw string).
    for section in config.sections():
        setattr(self, section, ConfigurationSection())
        for name in config.options(section):
            try:
                value = config.getboolean(section, name)
            except ValueError:
                try:
                    value = config.getint(section, name)
                except ValueError:
                    value = config.get(section, name)
            setattr(getattr(self, section), name, value)

    # Second pass: apply the template (expected sections/keys, declared
    # types and optional defaults).
    # NOTE(review): each template section replaces the section object
    # built in the first pass; keys not listed in the template are lost
    # for those sections — confirm this is intended.
    for section in template.keys():
        # setattr even if section is not present in ini file
        # as it may have default value, check at value fetching
        setattr(self, section, ConfigurationSection())
        if type(template[section]) != list:
            reason = "Malformed Template section type should be list"
            raise IrmaConfigurationError(reason)
        for (key_name, key_type, key_def_value) in template[section]:
            if not config.has_option(section, key_name):
                # If key not found but a default value exists, set it
                if key_def_value is not None:
                    setattr(getattr(self, section), key_name,
                            key_def_value)
                    continue
                else:
                    reason = ("file {0} ".format(cfg_file) +
                              "missing section {0} ".format(section) +
                              "key {0}".format(key_name))
                    raise IrmaConfigurationError(reason)
            # Fetch with the declared type; a type mismatch is reported
            # as a configuration error rather than a bare ValueError.
            try:
                if key_type == self.boolean:
                    value = config.getboolean(section, key_name)
                elif key_type == self.integer:
                    value = config.getint(section, key_name)
                else:
                    value = config.get(section, key_name)
                setattr(getattr(self, section), key_name, value)
            except ValueError:
                reason = ("file {0} ".format(cfg_file) +
                          "missing section {0} ".format(section) +
                          "Wrong type for key {0}".format(key_name))
                raise IrmaConfigurationError(reason)
def main():
    """Entry point: validate the CSV of VM definitions, queue each row,
    then run the download/upload worker pools.

    Expects the vm.conf path as the first command-line argument.
    """
    prelog = InitLog("main")
    if len(sys.argv) < 2:
        prelog.critical("Have no sepecify the vm.conf")
        # BUG FIX: the original fell through here and later crashed on
        # sys.argv[1]; bail out cleanly instead.
        sys.exit(1)

    op = opCli(prelog)
    # Expected CSV header, in order.
    csvformat = [
        'vmrelease', 'vmuser', 'vmpass', 'vmip', 'exsiip', 'exsiuser',
        'exsipass', 'vmname', 'vmmem', 'vmcpu', 'vmdisk', 'vmowner',
        'vmproject', 'multidisk'
    ]

    if not op.pre():
        # BUG FIX: this used the undefined name `pre`, which raised a
        # NameError instead of logging the problem before exiting.
        prelog.critical("The vm.conf configuration is wrong!!!!")
        sys.exit(1)

    csvfile = op.cf.get("vm", "path")
    with open(csvfile) as rf:
        csvreader = csv.reader(rf)
        header = csvreader.next()  # Python 2 iterator protocol
        if header == csvformat:
            prelog.info("CSV format vaild")
            prelog.info("Put the vm infomation to queue....")
            # Queue every data row for the worker processes.
            for line in csvreader:
                qvm.put(line)
            prelog.info("Put done...")
        else:
            prelog.critical("The format,header of vm is wrong!!!")
            sys.exit(1)

    cf = ConfigParser()
    cf.read(sys.argv[1])
    # bootimg is optional; default to True when missing/unreadable.
    try:
        prelog.info("Try to get the bootimg seeting")
        bootimg = cf.getboolean("openstack", "bootimg")
    except Exception:
        prelog.warning("Get the bootimg setting failed")
        prelog.warning("set the bootimg=True.....")
        bootimg = True

    # Worker pool sizes.
    downSize = cf.getint("multiprocess", "download")
    upSize = cf.getint("multiprocess", "upload")

    prelog.info("Start the download process...")
    downloadProc = batch(download, downSize)
    prelog.info("Start the upload process...")
    uploadProc = batch(upload, upSize, bootimg=bootimg)

    # Wait for both pools to drain.
    downloadProc.close()
    uploadProc.close()
    downloadProc.join()
    uploadProc.join()
    prelog.info("ALL done....")
def init_settings(argv):
    """Populate SETTINGS from the [chart] section of the configuration
    file named by ``argv[1]``."""
    conf = ConfigParser()
    conf.read(argv[1])

    # option -> getter for the [chart] section.
    getters = (
        ('logfile', conf.get),
        ('debug', conf.getboolean),
        ('interval', conf.getint),
        ('export_dir', conf.get),
        ('replay', conf.getint),
    )
    for option, getter in getters:
        SETTINGS[option] = getter('chart', option)
def init_conf(argv):
    """Populate CONF from the [resolve] section of the configuration
    file named by ``argv[1]``."""
    conf = ConfigParser()
    conf.read(argv[1])

    section = 'resolve'
    CONF['logfile'] = conf.get(section, 'logfile')
    # Network magic bytes are stored hex-encoded in the config file.
    CONF['magic_number'] = unhexlify(conf.get(section, 'magic_number'))
    for option in ('db', 'ttl'):
        CONF[option] = conf.getint(section, option)
    CONF['debug'] = conf.getboolean(section, 'debug')
def setUp(self):
    """Connect to the Redis instance described by test.cfg and flush
    the test database; abort when no [Test] section is present."""
    conf = ConfigParser()
    conf.read('test.cfg')
    if conf.sections() == ['Test']:
        self.ps = thoonk.Thoonk(host=conf.get('Test', 'host'),
                                port=conf.getint('Test', 'port'),
                                db=conf.getint('Test', 'db'))
        # Start every test from an empty database.
        self.ps.redis.flushdb()
    else:
        print 'No test configuration found in test.cfg'
        exit()
def testConfig():
    '''
    Read the configuration from config.txt and print two sample
    options to show the file was parsed.
    '''
    CONFIGFILE = 'config.txt'
    config = ConfigParser()
    config.read(CONFIGFILE)
    print config.get('messages', 'hello')
    # NOTE(review): the option name is spelled 'lenth' in the file.
    print config.getint('numbers', 'lenth')
def init_settings(argv):
    """
    Populates SETTINGS with key-value pairs from configuration file.

    Reads the [crawl] section of the file named by ``argv[1]``, creates
    the crawl directory if needed, and flags this process as master
    when ``argv[2] == "master"``.
    """
    conf = ConfigParser()
    conf.read(argv[1])
    SETTINGS['logfile'] = conf.get('crawl', 'logfile')
    SETTINGS['network'] = conf.get('crawl', 'network')
    # Seed nodes are listed one per line inside the config value.
    SETTINGS['seeders'] = conf.get('crawl', 'seeders').strip().split("\n")
    SETTINGS['workers'] = conf.getint('crawl', 'workers')
    SETTINGS['debug'] = conf.getboolean('crawl', 'debug')
    SETTINGS['source_address'] = conf.get('crawl', 'source_address')
    SETTINGS['protocol_version'] = conf.getint('crawl', 'protocol_version')
    SETTINGS['user_agent'] = conf.get('crawl', 'user_agent')
    SETTINGS['services'] = conf.getint('crawl', 'services')
    SETTINGS['relay'] = conf.getint('crawl', 'relay')
    SETTINGS['socket_timeout'] = conf.getint('crawl', 'socket_timeout')
    SETTINGS['cron_delay'] = conf.getint('crawl', 'cron_delay')
    SETTINGS['snapshot_delay'] = conf.getint('crawl', 'snapshot_delay')
    SETTINGS['max_age'] = conf.getint('crawl', 'max_age')
    SETTINGS['ipv6'] = conf.getboolean('crawl', 'ipv6')
    SETTINGS['ipv6_prefix'] = conf.getint('crawl', 'ipv6_prefix')
    SETTINGS['nodes_per_ipv6_prefix'] = conf.getint('crawl',
                                                    'nodes_per_ipv6_prefix')
    # Network exclusion lists are parsed by the helper into objects.
    SETTINGS['exclude_ipv4_networks'] = list_excluded_networks(
        conf.get('crawl', 'exclude_ipv4_networks'))
    SETTINGS['exclude_ipv6_networks'] = list_excluded_networks(
        conf.get('crawl', 'exclude_ipv6_networks'))
    SETTINGS['exclude_ipv4_bogons'] = conf.getboolean('crawl',
                                                      'exclude_ipv4_bogons')
    # Keep a reference to the initial exclusion list; the working list
    # may be replaced at runtime.
    SETTINGS['initial_exclude_ipv4_networks'] = \
        SETTINGS['exclude_ipv4_networks']
    SETTINGS['onion'] = conf.getboolean('crawl', 'onion')
    SETTINGS['tor_proxy'] = None
    if SETTINGS['onion']:
        # host:port of the local Tor SOCKS proxy.
        tor_proxy = conf.get('crawl', 'tor_proxy').split(":")
        SETTINGS['tor_proxy'] = (tor_proxy[0], int(tor_proxy[1]))
    SETTINGS['onion_nodes'] = conf.get('crawl',
                                       'onion_nodes').strip().split("\n")
    SETTINGS['include_checked'] = conf.getboolean('crawl', 'include_checked')
    SETTINGS['crawl_dir'] = conf.get('crawl', 'crawl_dir')
    if not os.path.exists(SETTINGS['crawl_dir']):
        os.makedirs(SETTINGS['crawl_dir'])
    # Set to True for master process
    SETTINGS['master'] = argv[2] == "master"
def init_settings(argv):
    """Populate SETTINGS from the [pcap] section of the configuration
    file named by ``argv[1]`` and ensure the pcap directory exists."""
    conf = ConfigParser()
    conf.read(argv[1])

    section = 'pcap'
    SETTINGS['logfile'] = conf.get(section, 'logfile')
    SETTINGS['debug'] = conf.getboolean(section, 'debug')
    for option in ('ttl', 'rtt_count'):
        SETTINGS[option] = conf.getint(section, option)
    SETTINGS['pcap_dir'] = conf.get(section, 'pcap_dir')

    # Create the capture directory on first run.
    if not os.path.exists(SETTINGS['pcap_dir']):
        os.makedirs(SETTINGS['pcap_dir'])
def read_config(config_file='/usr/lib/nagios/plugins/ceph/rgw_test.conf'):
    """Read the RGW check settings from ``config_file``.

    Returns a dict with connection details ([connection]), test sizing
    ([test]) and the NSCA target host ([send_nsca]).
    """
    parser = CP()
    parser.read(config_file)
    return {
        'host': parser.get('connection', 'host'),
        'port': parser.getint('connection', 'port'),
        'ssl': parser.getboolean('connection', 'ssl'),
        'block': parser.getint('test', 'urandom_block_size'),
        'bytes': parser.getint('test', 'total_bytes'),
        'bucket': parser.get('test', 'bucket'),
        'uid': parser.get('test', 'uid'),
        'nagios_host': parser.get('send_nsca', 'nagios_host'),
    }
def init():
    """Parse command-line flags and params_cluster.ini, populating the
    module-level globals used by the initial-conditions generator."""
    global gas, dm, output
    global M_dm, a_dm, N_dm, M_gas, a_gas, N_gas, Z
    global truncation_radius, gamma_gas, gamma_dm
    flags = parser(description="Generates an initial conditions file\
 for a galaxy cluster halo simulation.")
    flags.add_argument('--no-dm', help='No dark matter particles in the\
 initial conditions. The dark matter potential is\
 still used when calculating the gas temperatures.',
                       action='store_true')
    flags.add_argument('--no-gas', help='Gas is completely ignored, and\
 only dark matter is included.',
                       action='store_true')
    flags.add_argument('-o', help='The name of the output file.',
                       metavar="init.dat", default="init.dat")
    args = flags.parse_args()
    output = args.o

    # The parameter file is mandatory.
    if not path.isfile("params_cluster.ini"):
        print "params_cluster.ini missing."
        exit(0)

    # Decide which components to generate; at least one is required.
    if args.no_dm:
        if args.no_gas:
            print "Neither gas or dark matter were selected!"
            exit(0)
        else:
            gas = True
            dm = False
    elif args.no_gas:
        gas = False
        dm = True
    else:
        gas = True
        dm = True

    config = ConfigParser()
    config.read("params_cluster.ini")
    # Dark matter halo parameters (always read; the potential is used
    # even when dark matter particles are excluded).
    M_dm = config.getfloat('dark_matter', 'M_dm')
    a_dm = config.getfloat('dark_matter', 'a_dm')
    N_dm = config.getint('dark_matter', 'N_dm')
    gamma_dm = config.getfloat('dark_matter', 'gamma_dm')
    # Gas parameters, only when gas is included.
    if (gas):
        M_gas = config.getfloat('gas', 'M_gas')
        a_gas = config.getfloat('gas', 'a_gas')
        N_gas = config.getint('gas', 'N_gas')
        Z = config.getfloat('gas', 'Z')
        gamma_gas = config.getfloat('gas', 'gamma_gas')
    truncation_radius = config.getfloat('global', 'truncation_radius')
def create_config(config_path):
    """Read the default config overlaid with ``config_path`` overrides
    and return all watcher/DB settings as a flat dict."""
    parser = ConfigParser()
    parser.read(DEFAULT_CONFIG)   # baseline defaults first ...
    parser.read(config_path)      # ... then user overrides

    ew, cfg, db = 'Edit Watcher', 'Config', 'DB'
    return {
        # Watcher behaviour and Discord embed texts.
        'docheck': parser.getboolean(ew, 'ENABLE'),
        'db_extra_dbname': parser.get(ew, 'EXTRA_PORTAL_DB_NAME'),
        'db_extra_table': parser.get(ew, 'EXTRA_PORTAL_TABLE'),
        'webhook': parser.get(ew, 'WEBHOOK_URL'),
        'embed_username': parser.get(ew, 'USERNAME'),
        'embed_image': parser.get(ew, 'IMAGE'),
        'embed_location_title': parser.get(ew, 'LOCATION_EDIT_TITLE'),
        'embed_title_title': parser.get(ew, 'TITLE_EDIT_TITLE'),
        'embed_image_title': parser.get(ew, 'IMAGE_EDIT_TITLE'),
        'embed_deleted_title': parser.get(ew, 'DELETED_TITLE'),
        'embed_from': parser.get(ew, 'FROM'),
        'embed_to': parser.get(ew, 'TO'),
        'deleted_maxcount': parser.getint(ew, 'DELETED_LIMIT'),
        'deleted_maxtime': parser.get(ew, 'DELETED_TIMESPAN'),
        # Bounding box of the watched area.
        'lat_small': parser.getfloat(cfg, 'MIN_LAT'),
        'lat_big': parser.getfloat(cfg, 'MAX_LAT'),
        'lon_small': parser.getfloat(cfg, 'MIN_LON'),
        'lon_big': parser.getfloat(cfg, 'MAX_LON'),
        # Database connection and schema/column names.
        'db_scan_schema': parser.get(db, 'SCANNER_DB'),
        'db_portal_schema': parser.get(db, 'PORTAL_DB'),
        'db_host': parser.get(db, 'HOST'),
        'db_port': parser.getint(db, 'PORT'),
        'db_user': parser.get(db, 'USER'),
        'db_pass': parser.get(db, 'PASSWORD'),
        'db_portal_dbname': parser.get(db, 'PORTAL_DB_NAME'),
        'db_portal_table': parser.get(db, 'PORTAL_TABLE'),
        'db_portal_id': parser.get(db, 'PORTAL_ID'),
        'db_portal_lat': parser.get(db, 'PORTAL_LAT'),
        'db_portal_lon': parser.get(db, 'PORTAL_LON'),
        'db_portal_name': parser.get(db, 'PORTAL_NAME'),
        'db_portal_img': parser.get(db, 'PORTAL_IMAGE'),
        'db_portal_updated': parser.get(db, 'PORTAL_UPDATED'),
    }
def parse_config(config_file):
    """Parse the daemon configuration file and return it as a dict.

    Exits the process when ``config_file`` does not exist.  Logging is
    configured as a side effect before the remaining options are read.
    """
    if not os.path.isfile(config_file):
        sys.exit("Could not find configuration file: {0}".format(config_file))

    parser = ConfigParser()
    parser.read(config_file)

    # Set up logging first so later failures get recorded.
    log_file = None
    loggly_token = None
    if parser.getboolean('file_log', 'enabled'):
        log_file = parser.get('file_log', 'file')
    do_logging(log_file, loggly_token)

    config = {}
    if parser.getboolean('loggly_log', 'enabled'):
        config['loggly_token'] = parser.get('loggly_log', 'token')

    config['mongo_db'] = parser.get('mongodb', 'database')
    if os.getenv("REMOTE_MONGO") == "true":
        # Remote MongoDB: connection details come from the environment.
        config['mongo_host'] = os.getenv("MONGO_HOST")
        config['mongo_port'] = int(os.getenv("MONGO_PORT"))
        config['mongo_auth'] = os.getenv("MONGO_AUTH") == "true"
        if config['mongo_auth']:
            config['mongo_user'] = os.getenv("MONGO_USER")
            config['mongo_password'] = os.getenv("MONGO_PASSWORD")
            config['mongo_auth_mechanism'] = os.getenv("MONGO_AUTH_MECHANISM")
    else:
        # Local MongoDB with the default port and no auth.
        config['mongo_auth'] = False
        config['mongo_host'] = "127.0.0.1"
        config['mongo_port'] = 27017

    # hpfriends broker settings; channels are a comma-separated list.
    config['hpf_feeds'] = parser.get('hpfriends', 'channels').split(',')
    config['hpf_ident'] = parser.get('hpfriends', 'ident')
    config['hpf_secret'] = parser.get('hpfriends', 'secret')
    config['hpf_port'] = parser.getint('hpfriends', 'port')
    config['hpf_host'] = parser.get('hpfriends', 'host')

    config['webapi_port'] = parser.getint('webapi', 'port')
    config['webapi_host'] = parser.get('webapi', 'host')

    config['normalizer_ignore_rfc1918'] = parser.getboolean(
        'normalizer', 'ignore_rfc1918')
    return config
def raw_binary(path):
    """Load a raw binary recording described by its sidecar header.

    The header is an INI file (located via ``get_header_path``) whose
    [header] section gives the dtype, sample length, channel count and
    sampling rate.  Returns the loaded data.
    """
    path = os.path.expanduser(path)

    header = ConfigParser()
    header.read(get_header_path(path))

    raw = RawBinary(
        path,
        header.get('header', 'dtype'),
        header.getint('header', 'length'),
        header.getint('header', 'nb_channels'),
        header.getfloat('header', 'sampling_rate'),
    )
    return raw.load()
def parseConfigurationFile(self, configFile): """ Parse the configuration file to get base model parameters """ # Initialize defaults defaultParams = {} # CUDA kernels are defined externally in a .cu file defaultParams["cu_dir"] = os.path.join("pyhawkes", "cuda", "cpp") defaultParams["cu_file"] = "process_id_kernels.cu" defaultParams["thin"] = 1 defaultParams["sigma"] = 0.001 defaultParams["kappa"] = 5 defaultParams["nu"] = 4 defaultParams["mu"] = "None" # Create a config parser object and read in the file cfgParser = ConfigParser(defaultParams) cfgParser.read(configFile) self.params = {} self.params["cu_dir"] = cfgParser.get("proc_id_model", "cu_dir") self.params["cu_file"] = cfgParser.get("proc_id_model", "cu_file") self.params["thin"] = cfgParser.getint("proc_id_model", "thin") self.params["blockSz"] = cfgParser.getint("cuda", "blockSz") # Parse the params for the spatial GMM model self.params["sigma0"] = cfgParser.getfloat("proc_id_model", "sigma") self.params["kap0"] = cfgParser.getfloat("proc_id_model", "kappa") self.params["nu0"] = cfgParser.getfloat("proc_id_model", "nu") # Parse mu0from config file mu0_str = cfgParser.get("proc_id_model", "mu") if mu0_str == "None": # If not specified, take the mean of the data self.params["mu0"] = np.mean(self.base.data.X, 1) else: # Filter out unwanted characters mu0_str = filter( lambda c: c.isdigit() or c == "," or c == "-" or c == ".", mu0_str) self.params["mu0"] = np.fromstring(mu0_str, sep=",", dtype=np.float32) self.params["T0"] = self.params["sigma0"] * np.eye(self.base.data.D) # Parse the desired number of mixture components/processes self.params["K"] = cfgParser.getint("proc_id_model", "K")
def _load_configuration(self):
    """
    Loads the configuration settings from the application-specific
    configuration file.

    Pool sizes that are missing from the file keep their current
    (default) values.  Raises when the configuration file cannot be
    read.
    """
    config = ConfigParser()
    self._config = config

    read_files = config.read(self._app_config_path)
    # BUG FIX: this used `len(read_files) is not 1`, which compares
    # object identity rather than equality; `is` on small ints only
    # works by CPython caching accident.
    if len(read_files) != 1:
        raise Exception(
            "Error attempting to read application configuration file: {0}".
            format(self._app_config_path))

    #
    # Load incoming pool settings (optional; defaults kept on failure).
    # Bare `except:` narrowed to `except Exception:` so SystemExit /
    # KeyboardInterrupt are not swallowed.
    #
    try:
        self._incoming_queue_size = config.getint(
            self.INCOMING_MESSAGE_POOL_CONFIG_SECTION,
            self.QUEUE_SIZE_CONFIG_PROP)
    except Exception:
        pass
    try:
        self._incoming_thread_count = config.getint(
            self.INCOMING_MESSAGE_POOL_CONFIG_SECTION,
            self.THREAD_COUNT_CONFIG_PROP)
    except Exception:
        pass

    #
    # Load callback pool settings (optional; defaults kept on failure).
    #
    try:
        self._callbacks_queue_size = config.getint(
            self.MESSAGE_CALLBACK_POOL_CONFIG_SECTION,
            self.QUEUE_SIZE_CONFIG_PROP)
    except Exception:
        pass
    try:
        self._callbacks_thread_count = config.getint(
            self.MESSAGE_CALLBACK_POOL_CONFIG_SECTION,
            self.THREAD_COUNT_CONFIG_PROP)
    except Exception:
        pass

    # Give subclasses a chance to read their own settings.
    self.on_load_configuration(config)
def single(section, simfolder, uber_conf, config_file):
    '''
    Start runs for an individual configuration. Look up the right seed
    if seeds are configured.

    :param string section: section name - this is the name the data folder gets
    :param string simfolder: relative path to simfolder
    :param ConfigParser uber_conf: the main simulation configuration
    :param string config_file: name of the config file for this run
    '''
    if not simfolder == ".":
        os.chdir(simfolder)
    conf = ConfigParser()
    conf.read('%s' % config_file)

    data_dirname = 'data'
    section_dirname = "%s/%s" % (data_dirname, section)
    # make sure data section dir exists + copy actual config file there
    if not osp.exists(section_dirname):
        os.makedirs(section_dirname)
    Popen('cp %s %s' % (config_file, section_dirname), shell=True).wait()

    # Runs may be numbered from an offset (e.g. when resuming a series).
    start_run = 1
    if conf.has_option('control', 'start_run'):
        start_run = conf.getint('control', 'start_run')

    for run in xrange(start_run, conf.getint('control', 'runs') + start_run, 1):
        # Progress indicator: one dot per run.
        print ".",
        sys.stdout.flush()
        seed = ''
        if uber_conf.has_section('seeds'):
            # Look up the seed for this specific run number.
            try:
                seed = uber_conf.get('seeds', str(run))
            except Exception, e:
                print "[Nicessa] There is no seed specified for run %d" % run
        else:
            # No fixed seeds configured: reseed from system entropy.
            random.seed()
        # One log file per run, with a descriptive header line.
        logfile = "%s/log%d.dat" % (section_dirname, run)
        dat = open(logfile, 'w')
        dat.write("# Log for run %d in simulation %s \n" % (run, conf.get('meta', 'name')))
        dat.flush()
        dat.close()
        # Hand off to the simulation executable and wait for it.
        Popen("%s %s %s %s" % (conf.get('control', 'executable'), logfile, config_file, seed), shell=True).wait()
def init_settings(argv):
    """Populate SETTINGS from the [seeder] section of the configuration
    file named by ``argv[1]``."""
    conf = ConfigParser()
    conf.read(argv[1])

    section = 'seeder'
    # String-valued options.
    for option in ('logfile', 'export_dir', 'zone_file', 'template'):
        SETTINGS[option] = conf.get(section, option)
    SETTINGS['debug'] = conf.getboolean(section, 'debug')
    # Integer-valued options.
    for option in ('min_height', 'min_age', 'a_records', 'aaaa_records'):
        SETTINGS[option] = conf.getint(section, option)
def loadConfig(self, filename):
    """Restore the UI state (channel names, checkboxes, test parameters
    and calibration constants) from the INI file ``filename``."""
    config = ConfigParser()
    if filename:
        config.read(filename)

    # Digital channel rows: name + enabled checkbox.
    for i in range(len(self.group_digital)):
        section = "DIGITAL%d" % i
        self.group_digital[i][0].setText(
            config.get(section, 'name').decode('utf-8'))
        checkable = config.getint(section, 'checkable')
        if checkable == 1:
            self.group_digital[i][1].setCheckState(Qt.Checked)
        else:
            self.group_digital[i][1].setCheckState(Qt.Unchecked)

    # Pressure channel rows: name + checkbox + value combo.
    for i in range(len(self.group_press)):
        section = "PRESSURE%d" % i
        self.group_press[i][0].setText(
            config.get(section, 'name').decode('utf-8'))
        checkable = config.getint(section, 'checkable')
        if checkable == 1:
            self.group_press[i][1].setCheckState(Qt.Checked)
        else:
            self.group_press[i][1].setCheckState(Qt.Unchecked)
        self.group_press[i][2].setCurrentIndex(
            config.getint(section, 'value'))

    # Speed measurement on/off and trigger pattern.
    if config.getint('SPEED', 'speed') == 1:
        self.speed_chk.setCheckState(Qt.Checked)
    else:
        self.speed_chk.setCheckState(Qt.Unchecked)
    self.trigger_combo.setCurrentIndex(
        config.getint('SPEED', 'pattern'))

    # Log metadata (project name / operator).
    self.projectName_edit.setText(
        config.get('LOG', 'project').decode('utf-8'))
    self.person_edit.setText(
        config.get('LOG', 'person').decode('utf-8'))

    # Test parameters.  NOTE(review): the option really is spelled
    # 'progect' in the file format.
    self.testProject.setCurrentIndex(
        config.getint('PARAMETERS', 'progect'))
    self.testTimer.setValue(config.getfloat('PARAMETERS', 'timer'))
    # Measurement distances for the two test projects.
    self.yulei = config.getfloat('PARAMETERS', 'yulei')
    self.youer = config.getfloat('PARAMETERS', 'youer')
    self.default_digital.setCurrentIndex(
        config.getint('PARAMETERS', 'default_digital'))
    self.default_press.setCurrentIndex(
        config.getint('PARAMETERS', 'default_press'))
    self.threshold_press = config.getfloat('PARAMETERS', 'threshold_press')

    # Per-channel pressure calibration constants pc1..pc6.
    for i, j in enumerate(['pc1', 'pc2', 'pc3', 'pc4', 'pc5', 'pc6']):
        self.calibration[i] = config.getfloat('CALIBRATION', j)

    # Pick the measurement distance for the selected project.
    if self.testProject.currentIndex() == 0:
        self.measureDistance = self.yulei
    else:
        self.measureDistance = self.youer
    self.test_shift.setValue(self.measureDistance)
    self.measureTime = self.testTimer.value()
def read_ini():
    """Read app_namespace.ini (located next to this module) and return
    the application namespace as a dict."""
    conf = ConfigParser()
    # The ini file lives alongside this source file.
    conf.read(
        os.path.join(
            os.path.split(os.path.realpath(__file__))[0],
            "app_namespace.ini"))
    namespace = {}
    # [start] section: paths and comma/colon-separated lists.
    namespace['ini'] = conf.get('start', 'appini')
    namespace['log'] = conf.get('start', 'applog')
    namespace['project_structure'] = conf.get('start',
                                              'project_structure').split(',')
    namespace['project_ini'] = conf.get('start', 'project_ini').split(':')
    namespace['JSS_support'] = conf.get('start', 'JSS_support').split(',')
    namespace['monitor_time'] = conf.getfloat('start', 'monitor_time')
    namespace['config_head'] = conf.get('start', 'config_head')
    namespace['data_format'] = conf.get('start', 'data_format').split(',')
    # [process] section.
    namespace['process_assignments'] = conf.get('process',
                                                'assignments').split(',')
    namespace['process_status'] = conf.get('process', 'status').split(',')
    namespace['process_pat_per_job'] = conf.getint('process', 'pat_per_job')
    namespace['max_jobs_per_run'] = conf.getint('process', 'max_jobs_per_run')
    # Colors are stored as comma-separated 'r.g.b' triplets; each slot
    # is re-assigned below, so the shared-placeholder list is harmless.
    process_colors = conf.get('process', 'colors').split(',')
    namespace['process_colors'] = [[0, 0, 0]] * len(process_colors)
    for i, cl in enumerate(process_colors):
        tmp = cl.split('.')
        namespace['process_colors'][i] = [
            int(tmp[0]), int(tmp[1]), int(tmp[2])
        ]
    namespace['darkcal'] = conf.get('process', 'darkcal')
    namespace['classify_decomp'] = conf.get('classify', 'decomp').split(',')
    namespace['merge_sym'] = conf.get('merge', 'sym').split(',')
    namespace['phasing_method'] = conf.get('phasing', 'method').split(',')
    # now add function nicknames for each pipeline stage
    namespace['process_HF'] = conf.get('process', 'HF')
    namespace['process_FA'] = conf.get('process', 'FA')
    namespace['process_FAA'] = conf.get('process', 'FAA')
    namespace['process_AP'] = conf.get('process', 'AP')
    namespace['process_CLF'] = "decomp"
    namespace['process_MRG'] = "merge"
    namespace['process_PHS'] = "phasing"
    namespace['classify_SVD'] = conf.get('classify', 'SVD')
    namespace['classify_LLE'] = conf.get('classify', 'LLE')
    namespace['classify_SPEM'] = conf.get('classify', 'SPEM')
    namespace['classify_TSNE'] = conf.get('classify', 'TSNE')
    namespace['phasing_RAAR'] = conf.get('phasing', 'RAAR')
    namespace['phasing_DM'] = conf.get('phasing', 'DM')
    namespace['phasing_ERA'] = conf.get('phasing', 'ERA')
    namespace['merge_ICOSYM'] = conf.get('merge', 'ICOSYM')
    return namespace
def getMysqlConfig(self, db = 'test'):
    """Return a MySQL connection-settings dict.

    Reads the [db] node of conf/config.ini; ``db`` selects the target
    database name and defaults to 'test'.

    Returns:
        dict with host, port, user, passwd and db keys.
    Raises:
        Exception: when the file or the [db] node is unavailable.
    """
    try:
        parser = ConfigParser()
        parser.read('conf/config.ini')
        return {
            'host': parser.get('db', 'host'),
            'port': parser.getint('db', 'port'),
            'user': parser.get('db', 'user'),
            'passwd': parser.get('db', 'passwd'),
            'db': db
        }
    except Exception as e:
        error = """Can't load config from [conf/config.ini] or [db] node doesn't exist.\n Please make sure this file."""
        logging.warning(error)
        print(error)
        raise Exception(e)
def create_config(config_path):
    """ Parse config. """
    parser = ConfigParser()
    # Defaults first, then the user file so user values win.
    parser.read(DEFAULT_CONFIG)
    parser.read(config_path)
    # (result key, section, option, getter) — order defines the dict order.
    spec = (
        ('db_r_host', 'DB', 'HOST', parser.get),
        ('db_r_name', 'DB', 'NAME', parser.get),
        ('db_r_user', 'DB', 'USER', parser.get),
        ('db_r_pass', 'DB', 'PASSWORD', parser.get),
        ('db_r_port', 'DB', 'PORT', parser.getint),
        ('db_r_charset', 'DB', 'CHARSET', parser.get),
        ('db_gym', 'DB', 'TABLE_GYM', parser.get),
        ('db_gym_id', 'DB', 'TABLE_GYM_ID', parser.get),
        ('db_gym_name', 'DB', 'TABLE_GYM_NAME', parser.get),
        ('db_gym_image', 'DB', 'TABLE_GYM_IMAGE', parser.get),
        ('db_pokestop', 'DB', 'TABLE_POKESTOP', parser.get),
        ('db_pokestop_id', 'DB', 'TABLE_POKESTOP_ID', parser.get),
        ('db_pokestop_name', 'DB', 'TABLE_POKESTOP_NAME', parser.get),
        ('db_pokestop_image', 'DB', 'TABLE_POKESTOP_IMAGE', parser.get),
        ('username', 'Ingress', 'USERNAME', parser.get),
        ('pwd', 'Ingress', 'PASSWORD', parser.get),
        ('cookies', 'Ingress', 'COOKIES', parser.get),
        ('encoding', 'Other', 'ENCODING', parser.get),
    )
    return {key: getter(section, option) for key, section, option, getter in spec}
def parse_config(self, config_file):
    # Bail out early when the configuration file is absent.
    if not os.path.isfile(config_file):
        logger.critical(
            "Could not find configuration file: {0}".format(config_file))
        sys.exit(
            "Could not find configuration file: {0}".format(config_file))
    reader = ConfigParser()
    reader.read(config_file)
    # Collect everything into a flat dict; hpfeeds channels are a CSV list
    # and the port is the only integer option.
    settings = {
        'mysqlserver': reader.get('mysql', 'server'),
        'mysqldb': reader.get('mysql', 'database'),
        'mysqluser': reader.get('mysql', 'user'),
        'mysqlpass': reader.get('mysql', 'password'),
        'hpf_channels': reader.get('hpfeeds', 'channels').split(','),
        'hpf_ident': reader.get('hpfeeds', 'ident'),
        'hpf_secret': reader.get('hpfeeds', 'secret'),
        'hpf_port': reader.getint('hpfeeds', 'port'),
        'hpf_host': reader.get('hpfeeds', 'host'),
        'vt_enabled': reader.get('virus_total', 'enabled'),
        'vt_api_key': reader.get('virus_total', 'apikey'),
    }
    return settings
def __init__(self, config_file):
    """Function initializes the logger

    Args:
        config_file : name of the honeypot configuration file
    """
    logger.debug('Initializing database logger.')
    parser = ConfigParser()
    parser.read(config_file)
    # Safe defaults: logging stays disabled unless the config enables it.
    self.enabled = False
    self.connection = None
    self.max_retries = 5
    try:
        if parser.getboolean("mysql", "enabled"):
            self.host = parser.get("mysql", "host")
            # getint() already returns an int; the int() wrapper was redundant.
            self.port = parser.getint("mysql", "port")
            self.db = parser.get("mysql", "db")
            self.username = parser.get("mysql", "username")
            self.passphrase = parser.get("mysql", "passphrase")
            self.logdevice = parser.get("mysql", "logdevice")
            self.logsocket = parser.get("mysql", "logsocket")
            self.enabled = True
            # Connect in a greenlet so __init__ does not block on MySQL.
            gevent.spawn(self._connect)
    except (NoSectionError, NoOptionError):
        logger.exception(
            'Exception: Incomplete honeyd.cfg configuration. MySQL logging is disabled.'
        )
        self.enabled = False