def __init__(self, config_filename): locale.setlocale(locale.LC_ALL, '') assert os.path.isfile(config_filename), "Config file not found" local_config_parser = ConfigParser() local_config_parser.read(config_filename) product_info_filename = local_config_parser.get("Config", "info_produtos") self._printer_name = local_config_parser.get("Config", "impressora") assert os.path.isfile(product_info_filename), "Product info file not found" # Set barcode filename self._barcode_filename = os.path.join( os.path.dirname(product_info_filename), "barcode" ) cfg_parser = ConfigParser() cfg_parser.read(product_info_filename) self._primary_categories = dict(cfg_parser.items(self.PRIMARY_CATEGORY_SEC)) self._secondary_categories = dict(cfg_parser.items(self.SECONDARY_CATEGORY_SEC)) if cfg_parser.has_section(self.PRICE_SEC): self.price_list = [] for opt in sorted(cfg_parser.options(self.PRICE_SEC)): self.price_list.append(cfg_parser.getfloat(self.PRICE_SEC, opt)) else: self.price_list = [1.7, 2.21] self._label_header = cfg_parser.get("Label", "header").replace("\\n","\n") self._label_template = cfg_parser.get("Label", "label") self._labels_per_file = 30 self._product_unity = "pç" self._category_on_label = cfg_parser.getint("Geral", "cat_etiqueta")
def _convert(self):
    """Convert a legacy INI config file into the flat ``self.repo_cfg``
    mapping, then rewrite the file atomically.

    NOTE: Python 2 code (``ConfigParser`` module, ``dict.has_key``).
    """
    from ConfigParser import ConfigParser
    cp=ConfigParser()
    cp.read(self.filename)
    # Flatten [main]/[default] into "section.key" entries; underscores are
    # stripped from key names.
    for section in [ 'main', 'default' ]:
        if cp.has_section( section ):
            for key, val in cp.items(section):
                key = key.lower().replace('_', '')
                self.repo_cfg[ "%s.%s" % (section, key) ] = val
    # [store "<name>"] sections become "store.<name>.<key>"; section[6:]
    # drops the 'store ' prefix and strip() removes surrounding quotes.
    for section in filter( lambda n: n.startswith('store '), cp.sections() ):
        for key, val in cp.items(section):
            key = key.lower().replace('_', '')
            self.repo_cfg[ "store.%s.%s" % (section[6:].strip(' "'), key) ] = val
        # Stores default to enabled unless the file says otherwise.
        if not self.repo_cfg.has_key( "store.%s.enabled" % section[6:].strip(' "') ):
            self.repo_cfg[ "store.%s.enabled" % section[6:].strip(' "') ] = "true"
    # Write to a pid-suffixed temp file, then rename over the original so
    # the on-disk config is never half-written.
    tmpfile = "%s.%d.%s" % ( self.filename, os.getpid(), ".tmp" )
    try:
        self.save( tmpfile )
        os.rename( tmpfile, self.filename )
    except:
        # NOTE(review): bare except + unlink can mask the original error if
        # ``save`` failed before creating the temp file -- verify.
        os.unlink( tmpfile )
        raise
def _reload_options(self):
    """Re-read the config file at ``self.cpath`` and return its options.

    Caches the file mtime in ``self.ctime`` and mirrors a few [main]
    options onto the instance.  Python 2 code (``dict.has_key``).

    :raises ConfigFileNotFoundError: if the file cannot be stat'ed.
    :raises ImproperConfigFileError: if the file has no sections, or has
        both (or neither) of the ``module``/``path`` options in [main].
    """
    try:
        self.ctime = os.stat(self.cpath).st_mtime
        cfg = ConfigParser()
        cfg.read(self.cpath)
    except OSError:
        raise ConfigFileNotFoundError(self.cpath, source="default config")
    sections = cfg.sections()
    if len(sections) < 1:
        raise ImproperConfigFileError(self.cpath,
            _(u"%s should have at least one section" % self.cpath))
    options = Struct()
    options.main = mainsect = dict(cfg.items("main"))
    options.sections = sections
    ## if not(options.has_key("module") ^ options.has_key("path")):
    ##     raise ImproperConfigFileError(self.cpath,
    ##         _(u"%s should have a ``module`` or ``path`` option, but not both" % self.cpath))
    # Exactly one of ``module``/``path`` must be present (xor).
    if not (mainsect.has_key("module") ^ mainsect.has_key("path")):
        raise ImproperConfigFileError(
            self.cpath,
            _(u"%s should have a ``module`` or ``path`` option, but not both" % self.cpath)
        )
    # Every non-[main] section is copied verbatim onto the Struct.
    for sectname in sections[1:]:
        options[sectname] = dict(cfg.items(sectname))
    self.hotplug = mainsect.get("hotplug", False)
    self.module = mainsect.get("module", None)
    self.mpath = mainsect.get("path", "")
    self.class_ = mainsect.get("class", "")
    return options
def parse_image_build_config(self, config_file_name):
    """Parse an image-build .conf file into koji image-build arguments.

    Logic taken from koji.cli.koji.handle_image_build.  Unable to re-use
    koji's code because "cli" is not a package of koji and this logic is
    intermingled with CLI specific instructions.

    :param config_file_name: path of the user-supplied image-build.conf
    :return: tuple ``(image_name, args, {'opts': opts})`` as expected by
        the koji image-build task.
    :raises ValueError: if a mandatory [image-build] option is empty.
    """
    args = []
    opts = {}
    config = ConfigParser()
    # ``readfp`` was removed in Python 3.12; prefer ``read_file`` when
    # available, keeping the old call as a fallback for Python 2.
    read_fp = getattr(config, 'read_file', None) or config.readfp
    read_fp(self.get_default_image_build_conf())
    config.read(config_file_name)
    if self.architectures:
        config.set('image-build', 'arches', ','.join(self.architectures))
    elif self.architecture:
        config.set('image-build', 'arches', self.architecture)
    # else just use what was provided by the user in image-build.conf
    config_str = StringIO()
    config.write(config_str)
    self.log.debug('Image Build Config: \n%s', config_str.getvalue())
    image_name = None
    section = 'image-build'
    # Mandatory options become positional args, in koji's expected order;
    # they are removed so only keyword opts remain in the section.
    for option in ('name', 'version', 'arches', 'target', 'install_tree'):
        value = config.get(section, option)
        if not value:
            raise ValueError('{} cannot be empty'.format(option))
        if option == 'arches':
            value = [arch for arch in value.split(',') if arch]
        elif option == 'name':
            image_name = value
        args.append(value)
        config.remove_option(section, option)
    for option, value in config.items(section):
        if option in ('repo', 'format'):
            value = [v for v in value.split(',') if v]
        elif option in ('disk_size',):
            # BUG FIX: the original wrote ``option in ('disk_size')`` -- a
            # substring test on the string 'disk_size' -- so options such
            # as 'disk' or 'size' were wrongly coerced to int.
            value = int(value)
        opts[option] = value
    section = 'ova-options'
    if config.has_section(section):
        ova = ['{}={}'.format(k, v) for k, v in config.items(section)]
        opts['ova_option'] = ova
    section = 'factory-parameters'
    if config.has_section(section):
        opts['factory_parameter'] = list(config.items(section))
    return image_name, args, {'opts': opts}
class Playlist(object):
    """A ConfigParser-backed playlist persisted to an INI file.

    Option names are kept case-sensitive; every mutation is written to
    disk immediately.
    """

    def __init__(self, location):
        # Make sure the parent directory exists before reading/writing.
        dir = os.path.expanduser(os.path.dirname(location))
        if not os.path.exists(dir):
            os.makedirs(dir)
        self._config = ConfigParser()
        # Default optionxform lower-cases option names; keep them verbatim.
        self._config.optionxform = str
        self._file = os.path.expanduser(location)
        if os.path.exists(self._file):
            self._config.read(self._file)
        else:
            self._config.add_section('Playlist')
            self.save()

    def save(self):
        """Write the playlist file to disk."""
        # BUG FIX: previously opened in 'wb' (ConfigParser.write emits
        # text, which fails on Python 3) and the handle was never closed.
        with open(self._file, 'w') as fp:
            self._config.write(fp)

    def append(self, item):
        """Add an ``(option, value)`` pair and persist immediately."""
        self._config.set('Playlist', *item)
        self.save()

    def remove(self, option):
        """Delete *option* from the playlist and persist immediately."""
        self._config.remove_option('Playlist', option)
        self.save()

    def __getitem__(self, item):
        return self._config.items('Playlist')[item]

    def __iter__(self):
        return iter(self._config.items('Playlist'))
def read_file(self):
    """Parse ``self.hosts_file`` into a dict keyed by section name.

    Sections named in ``self.list_sections`` are expanded: their TOTAL_KEY
    option gives a count N, and the child sections ``<prefix><i><postfix>``
    (i = 1..N) are collected into a list of dicts.  All remaining sections
    are stored as plain dicts.

    :return: dict mapping section name -> dict (or -> list of dicts for
        list sections).
    """
    # Fail fast on malformed files before extracting anything.
    self.validate_config()
    config = ConfigParser()
    config.read(self.hosts_file)
    sections = config.sections()
    host_store = {}
    self.logger.debug("Extracting all List sections if mandatory, and store "
                      "it in host_store as a List with key value as list section_name")
    for section_name in self.list_sections:
        if section_name in sections:
            count = config.getint(section_name, TOTAL_KEY)
            # Prefix/postfix for child section names default to the parent
            # name and the empty string respectively.
            try:
                prefix = config.get(section_name, PREFIX_KEY)
            except NoOptionError:
                prefix = section_name
            try:
                postfix = config.get(section_name, POSTFIX_KEY)
            except NoOptionError:
                postfix = ''
            host_store[section_name] = []
            for index in range(count):
                index = index + 1  # child sections are numbered from 1
                child_section = '%s%s%s' % (prefix, index, postfix)
                items = config.items(child_section)
                host_store[section_name].append(dict(items))
                # Consumed children must not be re-processed below.
                sections.remove(child_section)
            sections.remove(section_name)
    self.logger.debug("Extracting all other configurations from file and "
                      "store it as a dictionary with key value as section_name")
    for section_name in sections:
        items = config.items(section_name)
        host_store[section_name] = dict(items)
    return host_store
def parse_config(self, config_filename):
    """Load an INI configuration file and split it into the FakeNet,
    Diverter and per-listener dictionaries on this instance.

    Exits the process if no usable configuration file can be located.
    """
    configs_dir = os.path.join(os.path.dirname(__file__), 'configs')
    # No file given: fall back to the bundled default configuration.
    if not config_filename:
        config_filename = os.path.join(configs_dir, 'default.ini')
    # A bare name may refer to a file inside the configs directory.
    if not os.path.exists(config_filename):
        config_filename = os.path.join(configs_dir, config_filename)
    if not os.path.exists(config_filename):
        self.logger.error('Could not open configuration file %s',
                          config_filename)
        sys.exit(1)
    parser = ConfigParser()
    parser.read(config_filename)
    self.logger.info('Loaded configuration file: %s', config_filename)
    # Route each section to its home: two well-known sections, then one
    # entry per enabled listener.
    for name in parser.sections():
        if name == 'FakeNet':
            self.fakenet_config = dict(parser.items(name))
        elif name == 'Diverter':
            self.diverter_config = dict(parser.items(name))
        elif parser.getboolean(name, 'enabled'):
            self.listeners_config[name] = dict(parser.items(name))
def config_from_file(config_file):
    """Populate the module-level ``config``/``template_dirs``/
    ``template_loader`` globals from *config_file*, and instantiate every
    checker listed in [checkers].
    """
    global config
    global template_dirs
    global template_loader
    # Read configuration; %(here)s in the file expands to the CWD.
    here = os.path.abspath('.')
    confp = ConfigParser(defaults={'here': here})
    confp.read(config_file)
    for sec in ('stats', 'checkers', 'alerts', 'mailer'):
        config[sec] = dict(confp.items(sec))
    # Setup template loader
    template_dirs = [ os.path.abspath('./templates')]
    template_loader.search_path.extend(template_dirs)
    # Load and register additional checkers.  'here' is skipped because
    # ConfigParser defaults leak into every section's items().
    for name, cls_name in confp.items('checkers'):
        if name != 'here':
            cls = _load_checker(name, cls_name)
            logging.info('Loaded checker %s: %r', name, cls)
    return
def read_cfg(floc, cfg_proc=process_cfg):
    """
    Reads the given configuration file, returning a dict with the converted values supplemented by default values.

    :param floc: The location of the file to read.
    :param cfg_proc: The processor to use for the raw configuration values. Uses default values when the raw value is missing.
    :return: A dict of the processed configuration file's data.
    :raises InvalidDataError: if the file cannot be parsed.
    :raises IOError: if the file cannot be read at all.
    """
    config = ConfigParser()
    try:
        good_files = config.read(floc)
    except ParsingError as e:
        raise InvalidDataError(e)
    if not good_files:
        raise IOError('Could not read file {}'.format(floc))
    # Process the main section first; defaults and required keys are
    # enforced by the processor.
    main_proc = cfg_proc(dict(config.items(MAIN_SEC)), DEF_CFG_VALS, REQ_KEYS, int_list=False)
    # Check that there is a least one subsection, or this script won't do anything. Check that all sections given
    # are expected or alert user that a given section is ignored (thus catches types, etc.)
    no_work_to_do = True
    for section in config.sections():
        if section in SECTIONS:
            if section in SUB_SECTIONS:
                if len(config.items(section)) > 0:
                    no_work_to_do = False
        else:
            warning("Found section '{}', which will be ignored. Expected section names are: {}"
                    .format(section, ", ".join(SECTIONS)))
    if no_work_to_do:
        warning("No filtering will be applied as no criteria were found for the expected subsections ({})."
                "".format(", ".join(SUB_SECTIONS)))
    # Range criteria and bin specifications are validated separately.
    for section in [MAX_SEC, MIN_SEC]:
        main_proc[section] = check_vals(config, section)
    main_proc[BIN_SEC] = get_bin_data(config, BIN_SEC)
    return main_proc
def __init__(self): conf = ConfigParser() conf.read('conf.ini') self.log_url = 'https://xueqiu.com/user/login' self.log_data = dict(conf.items('account')) self.header = dict(conf.items('header'))
def _create_configs_from_file(filename, cluster_config_dir, wildcards):
    """Expand one meta-INI file into per-service config files.

    Each section name is a target filename under *cluster_config_dir*;
    ``wildcards`` values are substituted via str.format everywhere.
    """
    configurations = ConfigParser(allow_no_value=True)
    configurations.read(filename)
    for config_file in configurations.sections():
        logger.info("Updating %s...", config_file)
        # For XML configuration files, run things through XmlConfiguration.
        if config_file.endswith('.xml'):
            XmlConfiguration(
                {item[0]: item[1].format(**wildcards) for item in configurations.items(config_file)}
            ).write_to_file(join(cluster_config_dir, config_file))
        # For everything else, recognize whether a line in the configuration should simply be
        # appended to the bottom of a file or processed in some way. The presence of +++ will
        # lead to the evaluation of the following string through the end of the line.
        else:
            lines = []
            for item in configurations.items(config_file):
                if item[0].startswith('+++'):
                    command = item[0].lstrip('+ ').format(**wildcards)
                    # Yes, we use eval here. This is potentially dangerous, but intention.
                    # SECURITY NOTE(review): eval of config-supplied text --
                    # only safe for fully trusted input files.
                    lines.append(str(eval(command)))  # pylint: disable=eval-used
                elif item[0] == "body":
                    lines.append(item[1].format(**wildcards))
                else:
                    # Bare keys (allow_no_value) are emitted verbatim.
                    lines.append(item[0].format(**wildcards))
            with open(join(cluster_config_dir, config_file), 'w') as conf:
                conf.write("".join(["{0}\n".format(line) for line in lines]))
def _get_args():
    """Assemble the effective seltest options.

    Precedence (lowest to highest): built-in DEFAULTS, the [default]
    section of the config file, the selected config-profile section, and
    finally command-line arguments.
    """
    try:  # py2
        from ConfigParser import ConfigParser
    except ImportError:  # py3
        from configparser import ConfigParser
    args = docopt.docopt(__doc__, version=seltest.__version__, argv=sys.argv[1:])
    config_path = args['--config'] or _find_config()
    config = {}
    profile_config = {}
    if config_path:
        config_path = _expand_path(config_path)
        # allow_no_value so we can write `-v`, not `-v=True`
        cp = ConfigParser(allow_no_value=True)
        cp.read(config_path)
        # this allows -v to mean -v=True, not -v=None
        config = dict((key, True if value is None else value)
                      for key, value in cp.items('default'))
        profile_name = args['--config-profile']
        if profile_name:
            profile_config = dict((key, True if value is None else value)
                                  for key, value in cp.items(profile_name))
    # Merge in increasing order of specificity.
    config = _merge_config_dicts(config, DEFAULTS)
    config = _merge_config_dicts(profile_config, config)
    config = _merge_config_dicts(args, config)
    return config
def parse_config(configs):
    """Read one or more config-file globs into a single ConfigParser.

    Follows ``include`` directives found in the DEFAULT section until no
    new files remain, copies DEFAULT options into every section, strips
    surrounding double quotes from values, then drops DEFAULT entirely.

    :type configs: list
    :rtype: ConfigParser
    """
    conf = ConfigParser()
    all_configs = []
    while len(configs) > 0:
        all_configs += configs
        # Expand each glob mask into real files.
        files = []
        for mask in configs:
            for f in glob.glob(mask):
                if os.path.isfile(f):
                    files.append(f)
        conf.read(files)
        configs = []
        # Files named by "include" that were not already read feed the
        # next iteration.
        if conf.has_option(DEFAULT_SECTION, "include"):
            configs = list(set(re.split(r'\s+', conf.get(DEFAULT_SECTION, "include"))) - set(all_configs))
    for section in conf.sections():
        # Materialize DEFAULT values into each concrete section...
        for k, v in conf.items(DEFAULT_SECTION):
            if not conf.has_option(section, k):
                conf.set(section, k, v)
        for k, v in conf.items(section):
            v = re.sub(r'^\s*"|"\s*$', '', v)  # remove quotes
            conf.set(section, k, v)
    # ...so the DEFAULT section itself can be removed.
    conf.remove_section(DEFAULT_SECTION)
    if not conf.sections():
        usage("No sections found in config files " + ", ".join(all_configs))
    return conf
def get_app_info(app_dir, app):
    """Collect push-certificate info for *app* under *app_dir*.

    For each of the develop/production subdirectories that contains both
    a certificate and a key file, record their paths plus the optional
    [apnsagent] section of the per-environment config file.

    :return: dict with 'app_key' and optional 'develop'/'production' keys.
    """
    app_info = {'app_key': app}
    dev_dir = join(app_dir, app, DEVELOP_DIR)
    if isdir(dev_dir) and exists(join(dev_dir, CER_FILE)) \
       and exists(join(dev_dir, KEY_FILE)):
        # Read the config file (optional).
        conf = join(dev_dir, CONF_FILE)
        conf_dict = {}
        if exists(conf):
            config = ConfigParser()
            config.read(conf)
            conf_dict = dict(config.items('apnsagent'))
        app_info['develop'] = {'cer_file': join(dev_dir, CER_FILE),
                               'key_file': join(dev_dir, KEY_FILE),
                               'config': conf_dict}
    pro_dir = join(app_dir, app, PRODUCTION_DIR)
    if isdir(pro_dir) and exists(join(pro_dir, CER_FILE)) \
       and exists(join(pro_dir, KEY_FILE)):
        conf = join(pro_dir, CONF_FILE)
        log.debug('config file: %s' % conf)
        conf_dict = {}
        if exists(conf):
            log.debug('load config file')
            config = ConfigParser()
            config.read(conf)
            conf_dict = dict(config.items('apnsagent'))
            log.debug('config content %s' % conf_dict)
        app_info['production'] = {'cer_file': join(pro_dir, CER_FILE),
                                  'key_file': join(pro_dir, KEY_FILE),
                                  'config': conf_dict}
    return app_info
def read_config_file(filename):
    """Return Settings instance built from contents of ``filename`` file."""
    parser = ConfigParser()
    parser.read(filename)
    settings = Settings()
    core_section = 'macman'
    default_section = 'default'
    # Handle core configuration.
    if parser.has_section(core_section):
        section = core_section
        for option in ['directory']:
            if parser.has_option(section, option):
                setattr(settings, option, parser.get(section, option))
    # Handle default configuration.
    if parser.has_section(default_section):
        # NOTE(review): this reads the section *name* out of
        # settings.default -- presumably Settings initializes it to the
        # string 'default'; otherwise this should be
        # ``section = default_section``.  Verify against Settings.
        section = settings.default
        settings.default = dict(parser.items(section))
    # Handle configuration of VMs: every remaining section is one VM.
    special_sections = (core_section, default_section)
    for section in parser.sections():
        if section in special_sections:
            continue
        vm_id = section
        settings.vms[vm_id] = dict(parser.items(section))
    return settings
def __init__(self):
    """Read credentials and headers from ./conf.ini and open a session."""
    parser = ConfigParser()
    parser.read('conf.ini')
    # Listing endpoint; callers append the page number.
    self.url = 'http://mm.taobao.com/json/request_top_list.htm?page='
    self.login_data = {key: value for key, value in parser.items('account')}
    self.headers = {key: value for key, value in parser.items('header')}
    self.ses = requests.session()
def __init__(self):
    """Read credentials and headers from ./conf.ini and open a session."""
    parser = ConfigParser()
    parser.read('conf.ini')
    # Base site URL; login data and headers come from the INI file.
    self.url = 'http://www.mace.manchester.ac.uk'
    self.login_data = {key: value for key, value in parser.items('account')}
    self.headers = {key: value for key, value in parser.items('header')}
    self.ses = requests.session()
def setUp(self):
    """Per-test setup: load test.cfg, reset the Mongo test collections,
    create a scratch directory, and assemble the catalog service config.
    """
    self.log("setUp()")
    self.log("test directory="+self.test_dir)
    # 1 read config file and pull out some stuff
    if not os.path.isfile(os.path.join(self.test_dir,'test.cfg')):
        raise ValueError('test.cfg does not exist in test dir')
    config = ConfigParser()
    config.read(os.path.join(self.test_dir,'test.cfg'))
    self.test_cfg = {}
    self.nms_test_cfg = {}
    for entry in config.items('catalog-test'):
        self.test_cfg[entry[0]] = entry[1]
    for entry in config.items('NarrativeMethodStore'):
        self.nms_test_cfg[entry[0]] = entry[1]
    self.log('test.cfg parse\n'+pformat(self.test_cfg))
    # passwords not needed in tests yet
    self.test_user_1 = self.test_cfg['test-user-1']
    #self.test_user_psswd_1 = self.test_cfg['test-user-psswd-1']
    self.test_user_2 = self.test_cfg['test-user-2']
    #self.test_user_psswd_2 = self.test_cfg['test-user-psswd-2']
    # 2 check that db exists and collections are empty
    self.mongo = MongoClient('mongodb://'+self.test_cfg['mongodb-host'])
    db = self.mongo[self.test_cfg['mongodb-database']]
    self.modules = db[MongoCatalogDBI._MODULES]
    self.developers = db[MongoCatalogDBI._DEVELOPERS]
    # just drop the test db
    self.modules.drop()
    self.developers.drop()
    #if self.modules.count() > 0 :
    #    raise ValueError('mongo database collection "'+MongoCatalogDBI._MODULES+'"" not empty (contains '+str(self.modules.count())+' records). aborting.')
    self.initialize_mongo()
    # 3 setup the scratch space (timestamped so concurrent runs don't collide)
    self.scratch_dir = os.path.join(self.test_dir,'temp_test_files',str(datetime.now()))
    self.log("scratch directory="+self.scratch_dir)
    os.makedirs(self.scratch_dir)
    # 4 startup any dependencies (nms, docker registry?)
    # 4 assemble the config file for the catalog service
    self.catalog_cfg = {
        'admin-users':self.test_user_2,
        'mongodb-host':self.test_cfg['mongodb-host'],
        'mongodb-database':self.test_cfg['mongodb-database'],
        'temp-dir':self.scratch_dir,
        'docker-base-url':self.test_cfg['docker-base-url'],
        'docker-registry-host':self.test_cfg['docker-registry-host'],
        'nms-url':self.test_cfg['nms-url'],
        'nms-admin-user':self.test_cfg['nms-admin-user'],
        'nms-admin-psswd':self.test_cfg['nms-admin-psswd']
    }
def validate_config(self):
    """Validate the structure of ``self.hosts_file`` before extraction.

    Checks that all mandatory sections exist, that every list section has
    a TOTAL_KEY count plus all of its numbered child sections (whose
    prefix/postfix are cached on the instance), and -- when a
    section_schema is set -- that no unexpected sections remain.
    """
    config = ConfigParser()
    config.read(self.hosts_file)
    sections = config.sections()
    self.logger.debug("Validating mandatory sections")
    for section_name in self.mandatory_sections:
        if section_name not in sections:
            raise NoSectionError(section_name)
    self.logger.debug("Validating sections to be extracted as list")
    for section_name in self.list_sections:
        if section_name in sections:
            try:
                count = config.getint(section_name, TOTAL_KEY)
            except NoOptionError:
                self.logger.error(
                    "'%s' option not specified in mandatory"
                    " list section '%s'" % (TOTAL_KEY, section_name)
                )
                raise NoOptionError(TOTAL_KEY, section_name)
            # Cache prefix/postfix for later use by read_file().
            try:
                prefix = config.get(section_name, PREFIX_KEY)
            except NoOptionError:
                prefix = section_name
            self.prefix_store[section_name] = prefix
            try:
                postfix = config.get(section_name, POSTFIX_KEY)
            except NoOptionError:
                postfix = ""
            self.postfix_store[section_name] = postfix
            # Every child section 1..count must exist.
            for index in range(count):
                index = index + 1
                child_section = "%s%s%s" % (prefix, index, postfix)
                try:
                    config.items(child_section)
                except NoSectionError:
                    self.logger.error(
                        "'%s' section not specified for"
                        " parent section '%s'. '%s' is a"
                        " list section with count '%s'."
                        % (child_section, section_name, section_name, count)
                    )
                    raise NoSectionError(
                        "'%s' section not specified for"
                        " parent section '%s'. '%s' is a"
                        " list section with count '%s'."
                        % (child_section, section_name, section_name, count)
                    )
                sections.remove(child_section)
            sections.remove(section_name)
    self.logger.debug("Validating sections are within ths section_schema,"
                      " if specified")
    if self.section_schema is not None:
        # Anything left over after removing schema sections is invalid.
        for section_name in self.section_schema:
            try:
                sections.remove(section_name)
            except ValueError:
                pass
        if len(sections) != 0:
            self.logger.error("'Invalid sections added. %s" % (sections))
            raise Error("Invalid sections added. %s" % (sections))
def _loadFeatConf(self, location):
    """Load the feature-extraction configuration.

    Returns the combined [HCOPY] and [GLOBAL] option pairs parsed from
    the UTF-8 INI file at *location*.
    """
    log.debug(unicode(self) + " loading config file at '%s'." % (location))
    parser = ConfigParser()
    with codecs.open(location, encoding="utf-8") as config_fh:
        parser.readfp(config_fh)
    hcopy_items = list(parser.items("HCOPY"))
    global_items = list(parser.items("GLOBAL"))
    return hcopy_items + global_items
def from_ini(cls, fname, args=None):
    """Build a config object from an INI file (deprecated path).

    NOTE: Python 2 code (``print`` statement).  The ``args`` parameter is
    accepted but unused here.
    """
    print "This function is being deprecated and replaced by from_yaml classmethod"
    config_parse_object = ConfigParser()
    config_parse_object.read(fname)
    general_dict = dict(config_parse_object.items('general'))
    abundance_dict = dict(config_parse_object.items('abundances'))
    config_object = cls()
    # Delegate interpretation of each section to the instance.
    config_object.parse_general_section(general_dict)
    config_object.parse_abundance_section(abundance_dict)
    return config_object
def configure(): global BUILD_ID, cformat # Parse command line arguments options, args = parse_args() # Load the configuration from the file conf = ConfigParser() conf.read(options.conf) sections = conf.sections() if 'global' in sections: glow = dict((k.lower(), k) for k in globals()) for k, v in conf.items('global'): key = glow.get(k.lower()) if key: conv = type(globals()[key]) # int or str globals()[key] = conv(v) if 'output' in sections: DEFAULT_OUTPUT.update(conf.items('output')) if 'colors' in sections: COLOR.update(conf.items('colors')) if 'symbols' in sections: SYMBOL.update(conf.items('symbols')) if 'issues' in sections: # Preload the known issues for num, val in conf.items('issues'): rule = tuple(arg.strip() for arg in val.split(':')) issues._preload.append((num, rule)) # Set timeout socket.setdefaulttimeout(DEFAULT_TIMEOUT) # Prepare the output colors prepare_output() # Tweak configuration if DEFAULT_FAILURES and not options.failures: options.failures = DEFAULT_FAILURES.split() if options.failures: # ignore the -q option options.quiet = 0 if options.no_color: # replace the colorizer cformat = _cformat_plain if options.id == "build": # Use the build number as identifier BUILD_ID = "num" # out(options, args) return options, args
def load(cls):
    """Build an instance, populating contacts and calendars from the
    config file when one exists on disk."""
    instance = cls()
    # Nothing on disk yet: return the empty instance as-is.
    if not config_directory.exists() and not cls.config_file.exists():
        return instance
    parser = ConfigParser()
    parser.read(cls.config_file)
    for attr in ("contacts", "calendars"):
        getattr(instance, attr).update(parser.items(attr))
    return instance
def read_config(self, args):
    """Build LED/DMX driver instances from the config file named in argv.

    Config sections are grouped by their (driver, port) pair; each group
    describes the lights on one port/universe.

    NOTE(review): itertools.groupby only merges *adjacent* sections with
    the same key, so sections for one port must be contiguous in the file.
    """
    if len(args) != 2:
        print('Usage: ', args[0], '<config file>')
        sys.exit(1)
    config = ConfigParser()
    # NOTE(review): this file handle is never explicitly closed.
    config.readfp(open(args[1]))
    drivers = []
    def universes(section):
        # Group key: every section must define 'driver' and 'port'.
        d = dict(config.items(section))
        return (d['driver'], d['port'])
    for (driver, port), lights in itertools.groupby(config.sections(), universes):
        if driver == 'dmx':
            # One DMX fixture per section in the group.
            fixtures = []
            for light in list(lights):
                l = dict(config.items(light))
                fixtures.append(DmxFixture(**l))
            try:
                d = DriverDmx(fixtures, port=port)
                drivers.append(d)
            except:
                print("not adding", port)
        elif driver == 'allpixel':
            light = dict(config.items(list(lights)[0]))
            try:
                d = DriverSerial(num=int(light['length']),type=getattr(LEDTYPE, light['type']), dev=port)
                drivers.append(d)
            except:
                print("not adding", port)
        elif driver == 'emulator':
            # Emulator assumes a square layout: side = sqrt(length).
            light = dict(config.items(list(lights)[0]))
            try:
                num_pixels = int(math.sqrt(int(light['length'])))
                d = DriverVisualizer(width=num_pixels, height=num_pixels)
                drivers.append(d)
            except:
                print("error adding visualizer")
        elif driver == 'dummy':
            light = dict(config.items(list(lights)[0]))
            print(light)
            try:
                num_pixels = int(light['length'])
                d = DriverDummy(num=num_pixels)
                drivers.append(d)
            except:
                print("error adding dummy, you dummy")
        elif driver == 'audio':
            # The audio "driver" only records the port number for later use.
            light = dict(config.items(list(lights)[0]))
            self.audio_port = int(port)
    return drivers
def read_config(path=None, section="edwin"):
    """Load *section* of the INI file at *path*, defaults applied first.

    The directory containing the file is exposed to interpolation as
    ``%(here)s``.  Returns a plain dict of option -> value; options from
    *section* override those from DEFAULT.
    """
    if path is None:
        path = get_default_config_file()  #pragma NO COVERAGE
    parser = ConfigParser({'here': os.path.dirname(path)})
    parser.read([path])
    config = {}
    # DEFAULT first, then the requested section so it wins on conflicts.
    for source in ('DEFAULT', section):
        config.update(parser.items(source))
    return config
def __init__(self, file): self.file = file if file: f = open(file, "r") cp = ConfigParser() cp.readfp(f) f.close() self.dict = dict(cp.items("user")) if cp.has_section("templates"): self.templates = dict(cp.items("templates")) else: self.templates = dict()
class UIMapFileHandler:
    """Facade over one merged read-only ConfigParser (spanning all UI map
    files) and one write parser per file.

    NOTE: Python 2 code -- ``map()`` must return a list for the ``len()``
    call in ``readFiles`` to work.
    """
    def __init__(self, uiMapFiles):
        self.readFiles(uiMapFiles)

    def readFiles(self, uiMapFiles):
        # See top of file: uses the version from 2.6
        self.writeParsers = map(WriteParserHandler, uiMapFiles)
        if len(self.writeParsers) == 1:
            # Single file: read and write through the same parser.
            self.readParser = self.writeParsers[0]
        else:
            # Merged, ordered, case-preserving view across all files.
            self.readParser = ConfigParser(dict_type=OrderedDict)
            self.readParser.optionxform = str  # don't automatically lower-case everything
            self.readParser.read(uiMapFiles)

    def storeInfo(self, sectionName, signature, eventName):
        # New sections always go to the last write parser.
        if not self.readParser.has_section(sectionName):
            self.writeParsers[-1].add_section(sectionName)
        signature = signature.replace("::", "-")  # Can't store :: in ConfigParser unfortunately
        if not self.readParser.has_option(sectionName, signature):
            for writeParser in self.writeParsers:
                if writeParser.has_section(sectionName):
                    writeParser.set(sectionName, signature, eventName)

    def findWriteParser(self, section):
        # First file that contains the section wins; may return None.
        for parser in self.writeParsers:
            if parser.has_section(section):
                return parser

    def updateSectionName(self, section, newName):
        """ Note, we only add a new section, don't delete the old one as we once did
        This is in case some other widget is using it, which is possible and not easily detected currently
        Let the user clean away the old section if they want to."""
        writeParser = self.findWriteParser(section)
        if not writeParser.has_section(newName):
            writeParser.add_section(newName)
        for name, value in self.readParser.items(section):
            writeParser.set(newName, name, value)
        return newName

    def write(self, *args):
        for parserHandler in self.writeParsers:
            parserHandler.write()

    def __getattr__(self, name):
        # Everything else (sections(), items(), has_section(), ...) is
        # delegated to the merged read parser.
        return getattr(self.readParser, name)

    def findSectionAndOption(self, valueString):
        # Linear scan for the first option whose value is a prefix of
        # valueString; returns (None, None) when nothing matches.
        for section in self.readParser.sections():
            for optionName, value in self.readParser.items(section):
                if value and valueString.startswith(value):
                    return section, optionName
        return None, None
def get_config():
    """Read steepleherder.ini (next to the executable) into a nested dict
    with 'credentials', 'repo' and 'system' keys."""
    script_dir = os.path.dirname(os.path.realpath(argv[0]))
    parser = ConfigParser()
    parser.read(os.path.join(script_dir, 'steepleherder.ini'))
    return {
        'credentials': dict(parser.items('Credentials')),
        'repo': dict(parser.items('Repo')),
        'system': dict(parser.items('System')),
    }
class ProjectManager(object):
    """Load ~/.laue_script.ini (creating it on first run) and parse
    command-line options against it.

    NOTE: Python 2 code (``print`` statements).
    """
    def __init__(self, mode=None):
        self.options = {}
        self.exePath = ''
        self.configPath = expanduser('~/.laue_script.ini')
        self.config = ConfigParser()
        # Probe for a usable config; rebuild it if [Projects] is missing.
        try:
            self.config.read(self.configPath)
            self.config.items('Projects')
        except NoSectionError:
            self._make_config()
            self.config.read(self.configPath)
        if not mode == 'create':
            self._parseArgs()

    def getProjectsDirectory(self):
        return self.config.get('Projects', 'ProjectPath')

    def projectExists(self, projectName):
        return isdir(self.getProjectsDirectory() + '/{}'.format(projectName))

    def _make_config(self):
        # Write a fresh default config file with [Projects] and [Options].
        conf = ConfigParser()
        conf.add_section('Projects')
        conf.add_section('Options')
        conf.set('Projects', 'ProjectPath', expanduser('~/Laue-Script/projects'))
        # NOTE(review): bool values passed to set() -- relies on Python 2
        # ConfigParser stringifying them on write; verify.
        conf.set('Options', '-p', True)
        conf.set('Options', '-v', False)
        conf.set('Options', 'build', False)
        with open(expanduser('~/.laue_script.ini'), 'w') as fp:
            conf.write(fp)

    def _parseArgs(self):
        # Feed argv through a chain of parser states; each call returns
        # the parser for the next token.
        self.options = {}
        parser = FirstParser(self)
        items = {item[0]: item[1] for item in self.config.items('Options')}
        for string in argv:
            try:
                parser = parser(string, items)
            except ArgParseError:
                print 'Exiting gracefully after argument parsing error.'
                exit()
        try:
            parser.validate()
        except ArgParseError:
            print 'Exiting gracefully...'
            exit()

    def arg(self, string):
        # Missing options read as False.
        try:
            return self.options[string]
        except KeyError:
            return False
def loadConfig(config_file):
    """Load tile-worker configuration: storage settings plus a mapping of
    style name -> list of image formats (with optional PIL renames).

    NOTE: Python 2 code (``print >>``, ``.next()``, ``.iteritems()``).

    :return: ``(styles, TileStorage)`` tuple.
    """
    config = ConfigParser()
    config.read(config_file)
    try:
        storage_conf = dict(config.items('storage'))
        # 'styles' is a CSV list of style section names.
        style_headers = csv.reader([config.get('worker', 'styles')],skipinitialspace=True).next()
    except Exception as ex:
        print>>sys.stderr,"ERROR: failed to load worker configuration from: %s (%s)" % (config, str(ex))
        sys.exit(1)
    # Optional [formats]: name -> CSV list of image formats.
    formats = {}
    try:
        formats = dict(config.items('formats'))
        for name, format in formats.iteritems():
            formats[name] = csv.reader([format], skipinitialspace=True).next()
    except Exception as ex:
        print>>sys.stderr,"ERROR: failed to load worker configuration for formats from: %s (%s)" % (config, str(ex))
    styles = {}
    for style_header in style_headers :
        print>>sys.stderr,"Loading '%s' style" % style_header
        try :
            # Each style section's 'type' keys into the formats table.
            name = config.get(style_header, "type")
            styles[name] = formats[name]
        except Exception as ex:
            print>>sys.stderr,"ERROR: failed to load worker configuration for '%s' from: %s (%s)" % (style_header,config, str(ex))
            sys.exit(1)
    # A per-format section may rename a format to its PIL name.
    renames = {}
    for fmt in set([item for sublist in styles.values() for item in sublist]):
        try:
            opts = dict(config.items(fmt))
            if 'pil_name' in opts:
                renames[fmt] = opts['pil_name']
        except Exception as ex:
            print>>sys.stderr,"ERROR: failed to load worker configuration for format '%s' from: %s (%s)" % (fmt,config, str(ex))
            sys.exit(1)
    # Apply the renames to every style's format list.
    for style in styles:
        fmts2 = []
        for fmt in styles[style]:
            if fmt in renames:
                fmts2.append(renames[fmt])
            else:
                fmts2.append(fmt)
        styles[style] = fmts2
    return styles, tile_storage.TileStorage(storage_conf)
def raw_items(self, sec):
    '''Return the items in a section without decrypting the values.'''
    # Call the base-class implementation directly (unbound, Python 2
    # style) to bypass this subclass's overridden items(), which
    # presumably decrypts values -- verify against the enclosing class.
    return ConfigParser.items(self, sec)
def __init__(self, app, conf):
    """Memcache WSGI middleware setup.

    Resolves memcache settings from the filter section of the proxy
    config first, falling back to <swift_dir>/memcache.conf for any of
    servers / serialization format / max connections that are missing,
    then builds the MemcacheRing.
    """
    self.app = app
    self.memcache_servers = conf.get('memcache_servers')
    serialization_format = conf.get('memcache_serialization_support')
    try:
        # Originally, while we documented using memcache_max_connections
        # we only accepted max_connections
        max_conns = int(
            conf.get('memcache_max_connections',
                     conf.get('max_connections', 0)))
    except ValueError:
        max_conns = 0
    memcache_options = {}
    if (not self.memcache_servers or serialization_format is None or
            max_conns <= 0):
        # Something is missing from the filter section: consult
        # memcache.conf for base values.
        path = os.path.join(conf.get('swift_dir', '/etc/swift'),
                            'memcache.conf')
        memcache_conf = ConfigParser()
        if memcache_conf.read(path):
            # if memcache.conf exists we'll start with those base options
            try:
                memcache_options = dict(memcache_conf.items('memcache'))
            except NoSectionError:
                pass
            if not self.memcache_servers:
                try:
                    self.memcache_servers = \
                        memcache_conf.get('memcache', 'memcache_servers')
                except (NoSectionError, NoOptionError):
                    pass
            if serialization_format is None:
                try:
                    serialization_format = \
                        memcache_conf.get('memcache',
                                          'memcache_serialization_support')
                except (NoSectionError, NoOptionError):
                    pass
            if max_conns <= 0:
                try:
                    new_max_conns = \
                        memcache_conf.get('memcache',
                                          'memcache_max_connections')
                    max_conns = int(new_max_conns)
                except (NoSectionError, NoOptionError, ValueError):
                    pass
    # while memcache.conf options are the base for the memcache
    # middleware, if you set the same option also in the filter
    # section of the proxy config it is more specific.
    memcache_options.update(conf)
    connect_timeout = float(
        memcache_options.get('connect_timeout', CONN_TIMEOUT))
    pool_timeout = float(memcache_options.get('pool_timeout', POOL_TIMEOUT))
    tries = int(memcache_options.get('tries', TRY_COUNT))
    io_timeout = float(memcache_options.get('io_timeout', IO_TIMEOUT))
    # Hard fallbacks when nothing was configured anywhere.
    if not self.memcache_servers:
        self.memcache_servers = '127.0.0.1:11211'
    if max_conns <= 0:
        max_conns = 2
    if serialization_format is None:
        serialization_format = 2
    else:
        serialization_format = int(serialization_format)
    self.memcache = MemcacheRing(
        [s.strip() for s in self.memcache_servers.split(',') if s.strip()],
        connect_timeout=connect_timeout,
        pool_timeout=pool_timeout,
        tries=tries,
        io_timeout=io_timeout,
        # format 0 allows pickling both ways; format 1 only unpickling.
        allow_pickle=(serialization_format == 0),
        allow_unpickle=(serialization_format <= 1),
        max_conns=max_conns)
class Config(object):
    """This class represents the barman configuration.

    Default configuration files are /etc/barman.conf,
    /etc/barman/barman.conf
    and ~/.barman.conf for a per-user configuration
    """
    CONFIG_FILES = [
        '~/.barman.conf',
        '/etc/barman.conf',
        '/etc/barman/barman.conf',
    ]

    # Matches a value wrapped in one level of single or double quotes;
    # group(2) is the unquoted content
    _QUOTE_RE = re.compile(r"""^(["'])(.*)\1$""")

    def __init__(self, filename=None):
        self._config = ConfigParser()
        if filename:
            # A file-like object (anything with a .read attribute) is
            # parsed directly
            if hasattr(filename, 'read'):
                self._config.readfp(filename)
            else:
                # check for the existence of the user defined file
                if not os.path.exists(filename):
                    sys.exit("Configuration file '%s' does not exist" %
                             filename)
                self._config.read(os.path.expanduser(filename))
        else:
            # Check for the presence of configuration files
            # inside default directories
            for path in self.CONFIG_FILES:
                full_path = os.path.expanduser(path)
                # read() returns the list of successfully parsed files,
                # so this also guards against unreadable files
                if os.path.exists(full_path) \
                        and full_path in self._config.read(full_path):
                    filename = full_path
                    break
            else:
                sys.exit("Could not find any configuration file at "
                         "default locations.\n"
                         "Check Barman's documentation for more help.")
        self.config_file = filename
        self._servers = None
        self.servers_msg_list = []
        self._parse_global_config()

    def get(self, section, option, defaults=None, none_value=None):
        """Method to get the value from a given section from
        Barman configuration

        :param section: config section name
        :param option: option name
        :param defaults: extra vars for ConfigParser interpolation
        :param none_value: value returned when the option is literally 'none'
        """
        if not self._config.has_section(section):
            return None
        try:
            value = self._config.get(section, option, raw=False,
                                     vars=defaults)
            # the literal string 'none' (any case) maps to none_value
            if value.lower() == 'none':
                value = none_value
            if value is not None:
                # strip one level of surrounding quotes
                value = self._QUOTE_RE.sub(lambda m: m.group(2), value)
            return value
        except NoOptionError:
            return None

    def _parse_global_config(self):
        """
        This method parses the global [barman] section
        """
        self.barman_home = self.get('barman', 'barman_home')
        self.barman_lock_directory = self.get(
            'barman', 'barman_lock_directory') or self.barman_home
        self.user = self.get('barman', 'barman_user') or DEFAULT_USER
        self.log_file = self.get('barman', 'log_file')
        self.log_format = self.get('barman',
                                   'log_format') or DEFAULT_LOG_FORMAT
        self.log_level = self.get('barman', 'log_level') or DEFAULT_LOG_LEVEL
        # save the raw barman section to be compared later in
        # _is_global_config_changed() method
        self._global_config = set(self._config.items('barman'))

    def _is_global_config_changed(self):
        """Return true if something has changed in global configuration"""
        return self._global_config != set(self._config.items('barman'))

    def load_configuration_files_directory(self):
        """
        Read the "configuration_files_directory" option and load all the
        configuration files with the .conf suffix that lie in that folder
        """
        config_files_directory = self.get('barman',
                                          'configuration_files_directory')
        if not config_files_directory:
            return
        if not os.path.isdir(os.path.expanduser(config_files_directory)):
            _logger.warn(
                'Ignoring the "configuration_files_directory" option as '
                '"%s" is not a directory',
                config_files_directory)
            return
        for cfile in sorted(
                iglob(
                    os.path.join(os.path.expanduser(config_files_directory),
                                 '*.conf'))):
            filename = os.path.basename(cfile)
            if os.path.isfile(cfile):
                # Load a file
                _logger.debug('Including configuration file: %s', filename)
                self._config.read(cfile)
                # included files may not redefine anything in [barman]
                if self._is_global_config_changed():
                    msg = "the configuration file %s contains a not empty [" \
                          "barman] section" % filename
                    _logger.fatal(msg)
                    raise SystemExit("FATAL: %s" % msg)
            else:
                # Add an info that a file has been discarded
                _logger.warn('Discarding configuration file: %s (not a file)',
                             filename)

    def _populate_servers(self):
        """
        Populate server list from configuration file

        Also check for paths errors in configuration. If two or more paths
        overlap in a single server, that server is disabled. If two or more
        directory paths overlap between different servers an error is raised.
        """
        # Populate servers (idempotent: only runs once)
        if self._servers is not None:
            return
        self._servers = {}
        # Cycle all the available configurations sections
        for section in self._config.sections():
            if section == 'barman':
                # skip global settings
                continue
            # Exit if the section has a reserved name
            if section in FORBIDDEN_SERVER_NAMES:
                msg = "the reserved word '%s' is not allowed as server name." \
                      "Please rename it." % section
                _logger.fatal(msg)
                raise SystemExit("FATAL: %s" % msg)
            # Create a ServerConfig object
            self._servers[section] = ServerConfig(self, section)
        # Check for conflicting paths in Barman configuration
        self._check_conflicting_paths()

    def _check_conflicting_paths(self):
        """
        Look for conflicting paths intra-server and inter-server
        """
        # All paths in configuration, keyed by os.path.realpath so that
        # symlinked duplicates are detected too
        servers_paths = {}
        # Global errors list
        self.servers_msg_list = []
        # Cycle all the available configurations sections
        for section in sorted(self._config.sections()):
            if section == 'barman':
                # skip global settings
                continue
            # Paths map
            section_conf = self._servers[section]
            config_paths = {
                'backup_directory': section_conf.backup_directory,
                'basebackups_directory': section_conf.basebackups_directory,
                'errors_directory': section_conf.errors_directory,
                'incoming_wals_directory':
                    section_conf.incoming_wals_directory,
                'streaming_wals_directory':
                    section_conf.streaming_wals_directory,
                'wals_directory': section_conf.wals_directory,
            }
            # Check for path errors
            for label, path in sorted(config_paths.items()):
                # If the path does not conflict with the others, add it to
                # the paths map
                real_path = os.path.realpath(path)
                if real_path not in servers_paths:
                    servers_paths[real_path] = PathConflict(label, section)
                else:
                    if section == servers_paths[real_path].server:
                        # Internal path error.
                        # Insert the error message into the server.msg_list
                        if real_path == path:
                            self._servers[section].msg_list.append(
                                "Conflicting path: %s=%s conflicts with "
                                "'%s' for server '%s'" %
                                (label, path,
                                 servers_paths[real_path].label,
                                 servers_paths[real_path].server))
                        else:
                            # Symbolic link
                            self._servers[section].msg_list.append(
                                "Conflicting path: %s=%s (symlink to: %s) "
                                "conflicts with '%s' for server '%s'" %
                                (label, path, real_path,
                                 servers_paths[real_path].label,
                                 servers_paths[real_path].server))
                        # Disable the server
                        self._servers[section].disabled = True
                    else:
                        # Global path error.
                        # Insert the error message into the global msg_list
                        if real_path == path:
                            self.servers_msg_list.append(
                                "Conflicting path: "
                                "%s=%s for server '%s' conflicts with "
                                "'%s' for server '%s'" %
                                (label, path, section,
                                 servers_paths[real_path].label,
                                 servers_paths[real_path].server))
                        else:
                            # Symbolic link
                            self.servers_msg_list.append(
                                "Conflicting path: "
                                "%s=%s (symlink to: %s) for server '%s' "
                                "conflicts with '%s' for server '%s'" %
                                (label, path, real_path, section,
                                 servers_paths[real_path].label,
                                 servers_paths[real_path].server))

    def server_names(self):
        """This method returns a list of server names"""
        self._populate_servers()
        return self._servers.keys()

    def servers(self):
        """This method returns a list of server parameters"""
        self._populate_servers()
        return self._servers.values()

    def get_server(self, name):
        """
        Get the configuration of the specified server

        :param str name: the server name
        """
        self._populate_servers()
        return self._servers.get(name, None)

    def validate_global_config(self):
        """
        Validate global configuration parameters
        """
        # Check for the existence of unexpected parameters in the
        # global section of the configuration file
        # NOTE(review): 'log_format' is read by _parse_global_config but
        # missing from this allowed list — confirm whether setting it
        # should really warn.
        keys = [
            'barman_home',
            'barman_lock_directory',
            'barman_user',
            'log_file',
            'log_level',
            'configuration_files_directory'
        ]
        keys.extend(ServerConfig.KEYS)
        self._validate_with_keys(self._global_config, keys, 'barman')

    def validate_server_config(self, server):
        """
        Validate configuration parameters for a specified server

        :param str server: the server name
        """
        # Check for the existence of unexpected parameters in the
        # server section of the configuration file
        self._validate_with_keys(self._config.items(server),
                                 ServerConfig.KEYS, server)

    @staticmethod
    def _validate_with_keys(config_items, allowed_keys, section):
        """
        Check every config parameter against a list of allowed keys

        :param config_items: list of tuples containing provided parameters
            along with their values
        :param allowed_keys: list of allowed keys
        :param section: source section (for error reporting)
        """
        for parameter in config_items:
            # if the parameter name is not in the list of allowed values,
            # then output a warning
            name = parameter[0]
            if name not in allowed_keys:
                output.warning('Invalid configuration option "%s" in [%s] '
                               'section.', name, section)
def __init__(self, config, logger):
    """
    Constructs the Narrowband object according to the supplied XML
    description.

    Reads filter/transmitter settings from `config` (via the _Get*ElemVal
    helpers) plus a separate call-sign ini file, then spawns a daemon
    process running _AsyncSave fed through self.queue.
    """
    # Initialize data members
    self.filename = None
    self.fileStart = None
    self.startTime = None
    self.logger = logger
    self.already_saved = True
    self.x = None
    self.start_index = None
    self.num_samples = None
    #Filter Properties:
    #DemodDec = self._GetIntElemVal(config, "Demod_Decimate_Factor",100) #Default: 100kHz -> 1kHz : A,B
    #DemodBufLen = self._GetIntElemVal(config, "Demod_Buffer_Length",100000) #Default: buffers 1 sec. of 100 kHz data
    #FilteredLen = self._GetIntElemVal(config, "Filtered_Length",1000) #Default: buffers 1 sec. of 100 kHz data
    #OutputLen = self._GetIntElemVal(config, "Output_Buffer_Length",50) #Default: 50 Note: changing does not seem to matter...
    #DemodTimeInt = self._GetDblElemVal(config, "Demod_Time_Interval",10e-6) #Default: 10 ms resolution for 100kHz.
    do_low_res = self._GetIntElemVal(config, "do_low_res", 1)  #1 yes, 0 no
    do_sph_chan = self._GetIntElemVal(config, "do_sph_chan", 1)
    sampleFrequency = self._GetIntElemVal(config, "Sample_Frequency",
                                          50)  # Sampling rate [Hz]
    calibrationFactor = self._GetDblElemVal(config, "Calibration_Factor",
                                            1)  #Default: 1
    ## FilterLength = self._GetIntElemVal(config, "filter_length",1000) #[Number of Filter Taps to use]
    filter_tap_file = self._GetStrElemVal(
        config, "filter_taps", './filter_taps.txt')  #[Path to txt Filter File]
    # comma-separated list of call signs to match against the ini file
    call_signs = self._GetStrElemVal(config, "call_sign", 'XXX')
    conf_file = self._GetStrElemVal(config, "call_sign_file", './nb.conf')
    call_sign_config = ConfigParser()
    fp = open(conf_file)
    call_sign_config.readfp(fp)
    fp.close()
    ## ftaps = sio.loadmat(filter_tap_file)
    ## FilterTaps = np.array(ftaps['filter_taps'].take(np.arange(FilterLength)), dtype='float32')
    # Filter length is now derived from the tap file itself
    FilterTaps = np.loadtxt(filter_tap_file, unpack=True, dtype='float32')
    FilterLength = len(FilterTaps)
    #print FilterTaps
    #(change to be defined by callsign)
    call_sign = []
    TransFreq = []
    is_msk = []
    #use call_sign_config to find frequencies, msk flags:
    # each [NB_Transmitters] entry value is "frequency,msk_flag"
    avail_transmitters = call_sign_config.items('NB_Transmitters')
    call_signs = call_signs.split(',')
    for callSign in call_signs:
        callSign = callSign.strip()
        for avail_sign in avail_transmitters:
            # case-insensitive match of requested sign vs ini option name
            if callSign.lower() == avail_sign[0].strip().lower():
                call_sign.append(callSign)
                freq_msk = avail_sign[1].split(',')
                TransFreq.append(int(freq_msk[0].strip()))  #[Hz]
                is_msk.append(int(
                    freq_msk[1].strip()))  #[0 (no) or 1 (yes)]
    nb_input = [
        FilterTaps, FilterLength, TransFreq, sampleFrequency,
        calibrationFactor, is_msk, do_low_res, call_sign, do_sph_chan
    ]  #0-7
    # daemon process drains self.queue; it dies with this process
    self.queue = Queue(500)
    asyncSave = Process(target=_AsyncSave, args=(self.queue, nb_input,
                                                 config))
    asyncSave.daemon = True
    asyncSave.start()
class PromiseClient(Client):
    """Client variant whose attribute access returns lazy Promise wrappers."""

    def __getattr__(self, name):
        return Promise(name, self)


if __name__ == "__main__":
    from xodb.tools.signals import register_signals
    register_signals()

    import sys
    from ConfigParser import ConfigParser

    # BUG FIX: was ``len(sys.argv) < 1`` which can never be true because
    # argv always contains the program name, so a missing config-file
    # argument crashed later in config.read instead of printing usage.
    if len(sys.argv) < 2:
        print("usage: %s config_file" % sys.argv[0])
        sys.exit(-1)

    config = ConfigParser()
    config.read(sys.argv[1])
    client_config = dict(config.items('client'))

    client_url = client_config.get('client_url')
    log_file = client_config.get('log_file')
    timeout = int(client_config.get('timeout'))
    retry_limit = int(client_config.get('retry_limit'))

    import random
    c = Client(client_url, timeout, retry_limit)
    p = PromiseClient(client_url, timeout, retry_limit)
class TCConfig(object):
    """Wrapper around an ini-style tool configuration file."""

    # Section identifiers; keys into TCC_SECTION_NAMES.
    TCC_CONFIGURATION = 0
    TCC_LAYOUT = 1
    TCC_MCG_FW_DIR_HIST = 2
    TCC_MCG_FW_VERS_HIST = 3
    TCC_MCG_CFG_DIR_HIST = 4
    TCC_MCG_CFG_VERS_HIST = 5
    TCC_MCG_PACK_DIR_HIST = 6
    TCC_PRODBASE_VERS_HIST = 7
    # NOTE(review): TCC_INST_DST_HIST is assigned twice (8 here, 12 below).
    # The later assignment wins, so value 8 is unreachable through this
    # name — confirm which id is intended.
    TCC_INST_DST_HIST = 8
    TCC_PACK_BRNCH_HIST = 9
    TCC_PACK_TAG_HIST = 10
    TCC_INST_SRC_HIST = 11
    TCC_INST_DST_HIST = 12
    TCC_INST_DIR_HIST = 13
    # Maps section ids to the actual section names in the ini file.
    TCC_SECTION_NAMES = {
        TCC_CONFIGURATION: 'Configuration',
        TCC_LAYOUT: 'Layout',
        TCC_MCG_FW_DIR_HIST: 'McgFwDirHistory',
        TCC_MCG_FW_VERS_HIST: 'McgFwVersionHistory',
        TCC_MCG_CFG_DIR_HIST: 'McgCfgDirHistory',
        TCC_MCG_CFG_VERS_HIST: 'McgCfgVersionHistory',
        TCC_MCG_PACK_DIR_HIST: 'McgPackDirHistory',
        TCC_PRODBASE_VERS_HIST: 'ProdBaseVersionHistory',
        TCC_PACK_BRNCH_HIST: 'CreateMcgPackBranchHistory',
        TCC_PACK_TAG_HIST: 'CreateMcgPackTagHistory',
        TCC_INST_SRC_HIST: 'CopyToInstallSrcHistory',
        TCC_INST_DST_HIST: 'CopyToInstallDstHistory',
        TCC_INST_DIR_HIST: 'InstallDirHistory'
    }

    def __init__(self, config_file_name):
        # read ini file
        self.config_file_name = config_file_name
        self.config = ConfigParser()
        # keep option names case sensitive (default lower-cases them)
        self.config.optionxform = str
        self.config.read(config_file_name)
        # per-section metadata; history sections are capped at 10 entries
        self.tc_config_meta = {
            self.TCC_MCG_FW_DIR_HIST: {'max_options': 10},
            self.TCC_MCG_FW_VERS_HIST: {'max_options': 10},
            self.TCC_MCG_CFG_DIR_HIST: {'max_options': 10},
            self.TCC_MCG_CFG_VERS_HIST: {'max_options': 10},
            self.TCC_MCG_PACK_DIR_HIST: {'max_options': 10},
            self.TCC_PRODBASE_VERS_HIST: {'max_options': 10},
            self.TCC_PACK_BRNCH_HIST: {'max_options': 10},
            self.TCC_PACK_TAG_HIST: {'max_options': 10},
            self.TCC_INST_SRC_HIST: {'max_options': 10},
            self.TCC_INST_DST_HIST: {'max_options': 10},
            self.TCC_INST_DIR_HIST: {'max_options': 10}
        }
        # make sure every known section exists, even in a fresh/empty file
        tmp_available_sections = self.config.sections()
        for section_id, section_name in self.TCC_SECTION_NAMES.items():
            if section_name not in tmp_available_sections:
                self.config.add_section(section_name)

    def saveConfig(self, config_file_name):
        # watch out, write expects file open as text file - not binary!!
        with open(config_file_name, 'w') as configfile:
            try:
                self.config.write(configfile)
            # NOTE(review): bare except hides real errors — narrow it
            except:
                print('Unexpected error:', str(sys.exc_info()))

    def printOutConfig(self):
        """Dump all sections and options to stdout (debug aid)."""
        for section_name in self.config.sections():
            print('Section:', section_name)
            print(' Options:', self.config.options(section_name))
            for name, value in self.config.items(section_name):
                print(' %s = %s' % (name, value))
        print()

    def getSectionValues(self, section_id):
        """Return the option values (not names) of the given section id."""
        valueList = list()
        try:
            for name, value in self.config.items(
                    self.TCC_SECTION_NAMES[section_id]):
                valueList.append(value)
        except NoSectionError:
            print('Section %s does not exists',
                  self.TCC_SECTION_NAMES[section_id])
        except:
            print('Unexpected error:', sys.exc_info()[0])
        return valueList

    def getSectionFull(self, section_id):
        """Return the full section content.

        Layout section -> dict (ints except 'geometry'/'state');
        any other section -> list of (name, value) tuples.
        """
        if section_id == TCConfig.TCC_LAYOUT:
            section_list = dict()
            try:
                for option_name, option_value in self.config.items(
                        self.TCC_SECTION_NAMES[section_id]):
                    # geometry/state stay strings; everything else is int
                    if option_name in ['geometry', 'state']:
                        section_list[option_name] = option_value
                    else:
                        section_list[option_name] = int(option_value)
            except NoSectionError:
                print('Section %s does not exists',
                      self.TCC_SECTION_NAMES[section_id])
            except:
                print('Unexpected error:', sys.exc_info()[0])
        else:
            section_list = list()
            for option_name, option_value in self.config.items(
                    self.TCC_SECTION_NAMES[section_id]):
                section_list.append((option_name, option_value))
        return section_list

    def updateSection(self, section_id, new_section_data):
        """Write new data into a section.

        Layout section: new_section_data is a mapping of option -> value.
        History sections: new_section_data is a sequence stored under
        numeric option names '0'..'max_options-1'.
        """
        if section_id == TCConfig.TCC_LAYOUT:
            for option_name in new_section_data:
                try:
                    self.config.set(self.TCC_SECTION_NAMES[section_id],
                                    option_name,
                                    new_section_data[option_name])
                except NoSectionError:
                    print('Section %s does not exists',
                          self.TCC_SECTION_NAMES[section_id])
                except:
                    print('Unexpected error:', sys.exc_info()[0])
        else:
            option_counter = 0
            max_options = self.tc_config_meta[section_id]['max_options']
            for data_value in new_section_data:
                self.config.set(self.TCC_SECTION_NAMES[section_id],
                                str(option_counter), data_value)
                option_counter += 1
                # cap history length
                if option_counter >= max_options:
                    break
        pass
def __init__(self, root):
    """Save locally the configuration from scrapy.properties.

    Parses the [autodiscover] section and stores it, with each value
    deserialized via ``loads``, in ``self.conf``.
    """
    cfg = ConfigParser()
    # FIX: the file handle was opened inline and never closed (leak);
    # a with-block guarantees it is released.
    with open('scrapy.properties') as fp:
        cfg.readfp(fp)
    self.conf = {k: loads(v) for k, v in cfg.items('autodiscover')}
# Flag consumed by astropy_helpers machinery during setup.
builtins._ASTROPY_SETUP_ = True

from astropy_helpers.setup_helpers import (register_commands,
                                           get_debug_option,
                                           get_package_info)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py

# Get some values from the setup.cfg
# (ConfigParser moved to configparser in Python 3 — try both)
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))

PACKAGENAME = metadata.get('package_name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'https://github.com/jmccormac01/Donuts')

# Get the long description from the package's docstring
# __import__(PACKAGENAME)
# package = sys.modules[PACKAGENAME]
# LONG_DESCRIPTION = package.__doc__
LONG_DESCRIPTION = open('README.rst').read()

# Store the package name in a built-in variable so it's easy
def read_config(file):
    """Load the [input_info] section of *file* into the module-level
    ``input_dict``."""
    parser = ConfigParser()
    parser.read(file)  # read() handles opening and closing the file
    # items() yields (option, value) pairs, which update() consumes directly
    input_dict.update(parser.items('input_info'))
def main():
    '''Main program logic. Becomes the supervisor process.'''
    parser = OptionParser(usage="usage: %prog [options]\n"
                          "Default settings in config file: laikad.conf")
    parser.add_option("-d", "--debug",
                      action="store_true", default=False,
                      dest="debug",
                      help="enable debug messages to the console.")
    parser.add_option("-s", "--scan-config",
                      action="store", type="string",
                      dest="laikaboss_config_path",
                      help="specify a path for laikaboss configuration")
    parser.add_option("-c", "--laikad-config",
                      action="store", type="string",
                      dest="laikad_config_path",
                      help="specify a path for laikad configuration")
    parser.add_option("-b", "--broker-backend",
                      action="store", type="string",
                      dest="broker_backend_address",
                      help="specify an address for the workers to connect "
                      "to. ex: tcp://*:5559")
    parser.add_option("-f", "--broker-frontend",
                      action="store", type="string",
                      dest="broker_frontend_address",
                      help="specify an address for clients to connect to. "
                      "ex: tcp://*:5558")
    parser.add_option("-w", "--worker-connect",
                      action="store", type="string",
                      dest="worker_connect_address",
                      help="specify an address for clients to connect to. "
                      "ex: tcp://localhost:5559")
    parser.add_option("-n", "--no-broker",
                      action="store_true", default=False,
                      dest="no_broker",
                      help="specify this option to disable the broker for "
                      "this instance.")
    parser.add_option("-i", "--id",
                      action="store", type="string",
                      dest="runas_uid",
                      help="specify a valid username to switch to after "
                      "starting as root.")
    parser.add_option("-p", "--processes",
                      action="store", type="int",
                      dest="num_procs",
                      help="specify the number of workers to launch with "
                      "this daemon")
    parser.add_option("-r", "--restart-after",
                      action="store", type="int",
                      dest="ttl",
                      help="restart worker after scanning this many items")
    parser.add_option("-t", "--restart-after-min",
                      action="store", type="int",
                      dest="time_ttl",
                      help="restart worker after scanning for this many "
                      "minutes.")
    parser.add_option("-a", "--async",
                      action="store_true", default=False,
                      dest="run_async",
                      help="enable async messages. "
                      "This will disable any responses back to the client.")
    parser.add_option("-g", "--grace-timeout",
                      action="store", type="int",
                      dest="gracetimeout",
                      help="when shutting down, the timeout to allow "
                      "workers to finish ongoing scans before being killed")
    (options, _) = parser.parse_args()

    # Set the configuration file path
    config_location = '/usr/local/laikaboss/etc/laikad.conf'
    if options.laikad_config_path:
        config_location = options.laikad_config_path

    # We need a default framework config at a minimum
    if options.laikaboss_config_path:
        laikaboss_config_path = options.laikaboss_config_path
        logging.debug("using alternative config path: %s"
                      % options.laikaboss_config_path)
        if not options.laikaboss_config_path or \
                not os.path.exists(options.laikaboss_config_path) \
                if False else not os.path.exists(
                    options.laikaboss_config_path):
            print "the provided config path is not valid, exiting"
            return 1
    # Next, check to see if we're in the top level source directory
    # (dev environment)
    elif os.path.exists(DEFAULT_CONFIGS['dev_config_path']):
        laikaboss_config_path = DEFAULT_CONFIGS['dev_config_path']
    # Next, check for an installed copy of the default configuration
    elif os.path.exists(DEFAULT_CONFIGS['sys_config_path']):
        laikaboss_config_path = DEFAULT_CONFIGS['sys_config_path']
    # Exit
    else:
        print 'A valid framework configuration was not found in either of the following locations:\
\n%s\n%s' % (DEFAULT_CONFIGS['dev_config_path'],
             DEFAULT_CONFIGS['sys_config_path'])
        return 1

    # Read the config file
    config_parser = ConfigParser()
    config_parser.read(config_location)
    # Parse through the config file and append each section to a single dict
    for section in config_parser.sections():
        CONFIGS.update(dict(config_parser.items(section)))

    # Command-line flags take precedence over laikad.conf values
    if options.num_procs:
        num_procs = options.num_procs
    else:
        num_procs = int(get_option('numprocs'))

    if options.ttl:
        ttl = options.ttl
    else:
        ttl = int(get_option('ttl'))

    if options.time_ttl:
        time_ttl = options.time_ttl
    else:
        time_ttl = int(get_option('time_ttl'))

    if options.broker_backend_address:
        broker_backend_address = options.broker_backend_address
    else:
        broker_backend_address = get_option('brokerbackend')

    if options.broker_frontend_address:
        broker_frontend_address = options.broker_frontend_address
    else:
        broker_frontend_address = get_option('brokerfrontend')

    if options.worker_connect_address:
        worker_connect_address = options.worker_connect_address
    else:
        worker_connect_address = get_option('workerconnect')

    if options.gracetimeout:
        gracetimeout = options.gracetimeout
    else:
        gracetimeout = int(get_option('gracetimeout'))

    # NOTE: 'async' as a variable name is Python-2 only (keyword in py3.7+)
    if options.run_async:
        async = True
    else:
        async = strtobool(get_option('async'))

    logresult = strtobool(get_option('log_result'))

    # Get the UserID to run as, if it was not specified on the command line
    # we'll use the current user by default
    runas_uid = None
    runas_gid = None
    if options.runas_uid:
        from pwd import getpwnam
        runas_uid = getpwnam(options.runas_uid).pw_uid
        runas_gid = getpwnam(options.runas_uid).pw_gid

    if options.debug:
        logging.basicConfig(level=logging.DEBUG)

    # Lower privileges if a UID has been set
    try:
        if runas_uid:
            # drop group first: setgid fails after setuid has dropped root
            os.setgid(runas_gid)
            os.setuid(runas_uid)
    except OSError:
        print "Unable to set user ID to %i, defaulting to current user" % runas_uid

    # Add intercept for graceful shutdown
    def shutdown(signum, frame):
        '''Signal handler for shutting down supervisor gracefully'''
        logging.debug("Supervisor: shutdown handler triggered")
        global KEEP_RUNNING
        KEEP_RUNNING = False
    signal.signal(signal.SIGTERM, shutdown)
    signal.signal(signal.SIGINT, shutdown)

    # Start the broker
    broker_proc = None
    if not options.no_broker:
        if async:
            broker_proc = AsyncBroker(broker_backend_address,
                                      broker_frontend_address)
        else:
            broker_proc = SyncBroker(broker_backend_address,
                                     broker_frontend_address, gracetimeout)
        broker_proc.start()

    # Start the workers
    workers = []
    for _ in range(num_procs):
        worker_proc = Worker(laikaboss_config_path, worker_connect_address,
                             ttl, time_ttl, logresult,
                             int(get_option('workerpolltimeout')),
                             gracetimeout)
        worker_proc.start()
        workers.append(worker_proc)

    # Supervision loop: respawn dead broker/workers until shutdown signal
    while KEEP_RUNNING:
        # Ensure we have a broker
        if not options.no_broker and not broker_proc.is_alive():
            if async:
                broker_proc = AsyncBroker(broker_backend_address,
                                          broker_frontend_address)
            else:
                broker_proc = SyncBroker(broker_backend_address,
                                         broker_frontend_address,
                                         gracetimeout)
            broker_proc.start()

        # Ensure we have living workers
        dead_workers = []
        for worker_proc in workers:
            if not worker_proc.is_alive():
                dead_workers.append(worker_proc)

        for worker_proc in dead_workers:
            workers.remove(worker_proc)
            new_proc = Worker(laikaboss_config_path, worker_connect_address,
                              ttl, time_ttl, logresult,
                              int(get_option('workerpolltimeout')),
                              gracetimeout)
            new_proc.start()
            workers.append(new_proc)
            # reap the dead child
            worker_proc.join()
        # Wait a little bit
        time.sleep(5)

    logging.debug("Supervisor: beginning graceful shutdown sequence")
    logging.info("Supervisor: giving workers %d second grace period",
                 gracetimeout)
    time.sleep(gracetimeout)
    logging.info("Supervisor: terminating workers")
    for worker_proc in workers:
        if worker_proc.is_alive():
            os.kill(worker_proc.pid, signal.SIGKILL)
    for worker_proc in workers:
        worker_proc.join()
    if not options.no_broker:
        if broker_proc.is_alive():
            os.kill(broker_proc.pid, signal.SIGKILL)
        broker_proc.join()
    logging.debug("Supervisor: finished")
class Config(object):
    """Thin wrapper around ConfigParser bound to a single section.

    Construction never raises; failures set ``init_status`` to False and
    print a diagnostic, so callers must probe validity with is_valid().
    """

    def __init__(self, path, section):
        """
        :param path: path to the ini-style config file
        :param section: the single section this object exposes
        """
        self.init_status = True
        if os.path.exists(path):
            self.path = path
        else:
            self.init_status = False
            print("Error, missing config file (%s)." % path)
        try:
            self.config = ConfigParser()
            # if the file was missing, self.path is unset and this raises,
            # which the handler below turns into init_status = False
            self.config.read(self.path)
        except Exception:
            self.init_status = False
            print("Error, unable to read config file (%s)." % path)
        try:
            if self.config.has_section(section):
                self.section = section
            else:
                print("Error, missing section (%s)." % section)
                self.init_status = False
        except Exception:
            print("Error, unable to read section (%s)." % section)
            self.init_status = False

    def is_valid(self):
        """Return True when construction succeeded."""
        return self.init_status

    def get_option(self, key=None):
        """Return one option value, or all options when key is None.

        :returns: the value string; an OrderedDict of the whole section
            when ``key`` is None; False when the option is missing.
        """
        # FIX: compare to None with ``is`` (identity), not ``==``
        if key is None:
            return collections.OrderedDict(self.config.items(self.section))
        if self.config.has_option(self.section, key):
            return self.config.get(self.section, key)
        else:
            print("Error, missing option (%s)." % key)
            return False

    def set_options(self, print_options=False):
        """Copy every option of the section onto self as attributes.

        :returns: True on success, False if anything went wrong.
        """
        try:
            if print_options:
                print("Set config options.")
            options = dict(self.config.items(self.section))
            for option in options:
                value = self.config.get(self.section, option)
                setattr(self, option, value)
                if print_options:
                    print("  set %s to %s" % (option, value))
            return True
        except Exception as e:
            print("Error, set option failed, %s." % e)
            return False

    def verify_options(self):
        # todo: verify config options and set default value if not exist.
        return
def setUp(self):
    """Per-test fixture: parse test.cfg, wipe the Mongo test database,
    create a scratch dir and assemble the catalog service config."""
    self.log("setUp()")
    self.log("test directory=" + self.test_dir)

    # 1 read config file and pull out some stuff
    if not os.path.isfile(os.path.join(self.test_dir, 'test.cfg')):
        raise ValueError('test.cfg does not exist in test dir')
    config = ConfigParser()
    config.read(os.path.join(self.test_dir, 'test.cfg'))
    self.test_cfg = {}
    self.nms_test_cfg = {}
    for entry in config.items('catalog-test'):
        self.test_cfg[entry[0]] = entry[1]
    for entry in config.items('NarrativeMethodStore'):
        self.nms_test_cfg[entry[0]] = entry[1]
    self.log('test.cfg parse\n' + pformat(self.test_cfg))

    # passwords not needed in tests yet
    self.test_user_1 = self.test_cfg['test-user-1']
    #self.test_user_psswd_1 = self.test_cfg['test-user-psswd-1']
    self.test_user_2 = self.test_cfg['test-user-2']
    #self.test_user_psswd_2 = self.test_cfg['test-user-psswd-2']

    # 2 check that db exists and collections are empty
    self.mongo = MongoClient('mongodb://' + self.test_cfg['mongodb-host'])
    db = self.mongo[self.test_cfg['mongodb-database']]
    self.db_version = db[MongoCatalogDBI._DB_VERSION]
    self.modules = db[MongoCatalogDBI._MODULES]
    self.module_versions = db[MongoCatalogDBI._MODULE_VERSIONS]
    self.local_functions = db[MongoCatalogDBI._LOCAL_FUNCTIONS]
    self.developers = db[MongoCatalogDBI._DEVELOPERS]
    self.build_logs = db[MongoCatalogDBI._BUILD_LOGS]
    self.favorites = db[MongoCatalogDBI._FAVORITES]
    self.client_groups = db[MongoCatalogDBI._CLIENT_GROUPS]
    self.exec_stats_raw = db[MongoCatalogDBI._EXEC_STATS_RAW]
    self.exec_stats_apps = db[MongoCatalogDBI._EXEC_STATS_APPS]
    self.exec_stats_users = db[MongoCatalogDBI._EXEC_STATS_USERS]

    # just drop the test db collections so each test starts clean
    self.db_version.drop()
    self.modules.drop()
    self.module_versions.drop()
    self.local_functions.drop()
    self.developers.drop()
    self.build_logs.drop()
    self.favorites.drop()
    self.client_groups.drop()
    self.exec_stats_raw.drop()
    self.exec_stats_apps.drop()
    self.exec_stats_users.drop()
    #if self.modules.count() > 0 :
    #    raise ValueError('mongo database collection "'+MongoCatalogDBI._MODULES+'"" not empty (contains '+str(self.modules.count())+' records). aborting.')
    self.initialize_mongo()

    # 3 setup the scratch space (timestamped so runs never collide)
    self.scratch_dir = os.path.join(
        self.test_dir, 'temp_test_files',
        datetime.datetime.now().strftime("%Y-%m-%d-(%H-%M-%S-%f)"))
    self.log("scratch directory=" + self.scratch_dir)
    os.makedirs(self.scratch_dir)

    # 4 startup any dependencies (nms, docker registry?)

    # 4 assemble the config file for the catalog service
    self.catalog_cfg = {
        'admin-users': self.test_user_2,
        'mongodb-host': self.test_cfg['mongodb-host'],
        'mongodb-database': self.test_cfg['mongodb-database'],
        'temp-dir': self.scratch_dir,
        'docker-base-url': self.test_cfg['docker-base-url'],
        'docker-registry-host': self.test_cfg['docker-registry-host'],
        'docker-push-allow-insecure':
            self.test_cfg['docker-push-allow-insecure'],
        'nms-url': self.test_cfg['nms-url'],
        'nms-admin-user': self.test_cfg['nms-admin-user'],
        'nms-admin-psswd': self.test_cfg['nms-admin-psswd'],
        'ref-data-base': self.test_cfg['ref-data-base'],
        'kbase-endpoint': self.test_cfg['kbase-endpoint']
    }
def _warn_node(self, msg, node, **kwargs):
    # Monkeypatch target: silences Sphinx "nonlocal image URI found"
    # warnings while passing every other warning through with its
    # source location.
    if not msg.startswith('nonlocal image URI found:'):
        self._warnfunc(msg, '%s:%s' % get_source_line(node), **kwargs)

# def _warn_node(self, msg, node):
#     if not msg.startswith('nonlocal image URI found:'):
#         self._warnfunc(msg, '%s:%s' % get_source_line(node))
#
# sphinx.environment.BuildEnvironment.warn_node = _warn_node

# Load package metadata from the project's setup.cfg (one level up).
conf = ConfigParser()
conf.optionxform = str  # keep option names case sensitive
conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))

# -- General configuration ----------------------------------------------------

# By default, highlight as Python 3.
#highlight_language = 'python3'

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'

# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("x.y.z")` here.
# check_sphinx_version("1.2.1")

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
' '.join((str(x) for x in se_palette[c])) for n, c in enumerate(se_gradient))) + \ ' )' # Use RGB colors from user input # Use gnuplot> set palette color model RGB # and gnuplot> show palette colornames for RGB color names if options.colorfile: palettedef = 'model RGB' colorprefix = 'rgb' # Read in task colors from configuration file config = ConfigParser() config.optionxform = str # makes option names case sensitive config.readfp(open(options.colorfile, 'r')) # Colors are RGB colornames colors = dict(config.items('Colors')) # Raise KeyError if no color is specified for a task nocolors = [t for t in tasks if not colors.has_key(t)] if nocolors: errorMsg = 'Could not find task color for ' + ', '.join(nocolors) raise KeyError(errorMsg) # Pick colors from a pre-defined palette else: palettedef = 'model RGB defined %s' % se_palettedef colorprefix = 'palette frac' # Colors are fractions from the palette defined colors = dict([(t, '%0.2f' % (float(n) / len(tasks))) for n, t in enumerate(tasks)])
if not read.is_image(): print read.content else: print "Url is an image" else: # we need to make sure you submitted an ini file to use: # args.ini ini = ConfigParser() ini_path = path.join( path.dirname(path.dirname(path.dirname(__file__))), args.ini) ini.readfp(open(ini_path)) here = path.abspath(path.join(path.dirname(__file__), '../../')) ini.set('app:main', 'here', here) initialize_sql(dict(ini.items("app:main"))) ct = 0 all = False while (not all): # start the queue up we'll use to thread the url fetching enclosure_queue = Queue() if args.new_only: # we take off the offset because each time we run, we should # have new ones to process. The query should return the 10 # next non-imported urls url_list = Bmark.query.outerjoin(Readable, Bmark.readable).\ filter(Readable.imported == None).\ limit(PER_TRANS).all()
# Input VCF: named file or stdin
if args.input:
    vcf_reader = vcf.Reader(open(args.input, 'r'))
else:
    vcf_reader = vcf.Reader(sys.stdin)
# Output: named file (binary) or stdout
if args.output:
    stdout = open(args.output, 'wb')
else:
    stdout = sys.stdout

# define the used annotations:
if args.config:
    conf = ConfigParser()
    conf.read(args.config)
    pathConf = {a: b for a, b in conf.items('Path')}
    annotationConf = {a: b for a, b in conf.items('Annotations')}
    # roottype v/var/env means rootdir names an environment variable
    if pathConf['roottype'].lower() in ['v', 'var', 'env']:
        root_dir = os.environ[pathConf['rootdir']]
    else:
        root_dir = pathConf['rootdir']
    # collapse accidental double slashes from path concatenation
    root_anno = (root_dir + pathConf['annodir']).replace('//', '/')
    reference = root_anno + pathConf['reference']
    # annotations whose config value is 'Ignore' are excluded
    included_annotations = [
        annotationName for annotationName in annotationConf.keys()
        if annotationConf[annotationName] != 'Ignore'
    ]
for annotation in annotations:
    name = annotation.name.lower()
help='password') opt_parser.add_argument('-v', '--version', action='version', version='WitHost %s' % version, help='show WitPowser version') args = opt_parser.parse_args() cfg_parser = ConfigParser() ret = cfg_parser.read('.config') if not ret: print '".config" dose not exist!\n' exit() conf = {} for sect in cfg_parser.sections(): for (key, value) in cfg_parser.items(sect): conf[sect + '.' + key] = value apps = conf.get('sys.apps', '').split() if args.operation == 'add': user = args.user fname = args.fname or args.user password = args.password or 'wit%s' % user mail = args.email or base.name_to_mail(fname) if conf.has_key('sys.group'): group = conf['sys.group'] else: print "Invalid configuration\n" exit()
# Option-name pairs: (config key, canonical upper-case setting name)
RENDER_SETTINGS = [('color', 'COLOR'), ('charttype', 'TYPE'),
                   ('title', 'TITLE')]
DRAW_SETTINGS = [('x_offset', 'XOFFSET'), ('y_offset', 'YOFFSET'),
                 ('z_offset', 'ZOFFSET'), ('orientation', 'ORIENTATION'),
                 ('mirrored', 'MIRROR')]

# Overlay local chart.conf on the already-created parser `c`, if present
if os.path.exists('chart.conf'):
    c.read(['chart.conf'])

charts = []
sections = c.sections()
# every section except the reserved 'splunk' and 'minecraft' is a chart
for section in [
        section for section in sections
        if section != 'splunk' and section != 'minecraft'
]:
    t = dict(c.items(section))
    o = {}
    for (k, v) in t.iteritems():
        # coerce values according to the setting-name registries
        if k.upper() in INT_SETTINGS:
            o[k.upper()] = int(v)
        elif k.upper() in BOOLEAN_SETTINGS:
            o[k.upper()] = bool(int(v))
        else:
            o[k.upper()] = v
    charts.append(o)

# splunk connection settings, keys upper-cased, values kept as strings
t = dict(c.items('splunk'))
splunkconf = {}
for (k, v) in t.iteritems():
    splunkconf[k.upper()] = v
def setUpClass(cls):
    # One-time test fixture: read the service deployment config, build an
    # authenticated call context, create a scratch workspace, then stage a
    # small Assembly plus a BinnedContigs object used by the tests.
    token = environ.get('KB_AUTH_TOKEN', None)
    config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
    cls.cfg = {}
    config = ConfigParser()
    config.read(config_file)
    # Copy every option of the service section into a plain dict.
    for nameval in config.items('MetagenomeAPI'):
        cls.cfg[nameval[0]] = nameval[1]
    # Getting username from Auth profile for token
    authServiceUrl = cls.cfg['auth-service-url']
    auth_client = _KBaseAuth(authServiceUrl)
    user_id = auth_client.get_user(token)
    # WARNING: don't call any logging methods on the context object,
    # it'll result in a NoneType error
    cls.ctx = MethodContext(None)
    cls.ctx.update({'token': token,
                    'user_id': user_id,
                    'provenance': [{'service': 'MetagenomeAPI',
                                    'method': 'please_never_use_it_in_production',
                                    'method_params': []}],
                    'authenticated': 1})
    cls.wsURL = cls.cfg['workspace-url']
    cls.wsClient = workspaceService(cls.wsURL)
    cls.serviceImpl = MetagenomeAPI(cls.cfg)
    cls.scratch = cls.cfg['scratch']
    cls.callback_url = os.environ['SDK_CALLBACK_URL']

    # Millisecond timestamp keeps concurrent test runs from colliding on the
    # workspace name.
    suffix = int(time.time() * 1000)
    cls.wsName = "test_kb_maxbin_" + str(suffix)
    cls.ws_info = cls.wsClient.create_workspace({'workspace': cls.wsName})

    # you could bypass creating objects for more rapid testing here
    # cls.binnedcontigs_ref_1 = '19621/2/1'
    # cls.assembly_ref_1 = '19621/1/1'
    # return

    # create some test data
    cls.au = AssemblyUtil(cls.callback_url)
    cls.mu = MetagenomeUtils(cls.callback_url)
    cls.dfu = DataFileUtil(cls.callback_url)

    # building Assembly: copy the FASTA fixture into scratch and save it via
    # AssemblyUtil, keeping the resulting object reference for the tests.
    assembly_filename = 'small_bin_contig_file.fasta'
    cls.assembly_fasta_file_path = os.path.join(cls.scratch, assembly_filename)
    shutil.copy(os.path.join("data", assembly_filename),
                cls.assembly_fasta_file_path)
    assembly_params = {'file': {'path': cls.assembly_fasta_file_path},
                       'workspace_name': cls.wsName,
                       'assembly_name': 'MyAssembly'}
    print(os.path.isfile(cls.assembly_fasta_file_path))
    cls.assembly_ref_1 = cls.au.save_assembly_from_fasta(assembly_params)
    print('Assembly1:' + cls.assembly_ref_1)

    # stage and build BinnedContigs data from the bundled MaxBin sample output.
    test_directory_name = 'test_maxbindata'
    cls.test_directory_path = os.path.join(cls.scratch, test_directory_name)
    if not os.path.exists(cls.test_directory_path):
        os.makedirs(cls.test_directory_path)
    print(os.listdir(cls.test_directory_path))
    for item in os.listdir(os.path.join("data", "MaxBin_Result_Sample")):
        shutil.copy(os.path.join("data", "MaxBin_Result_Sample", item),
                    os.path.join(cls.test_directory_path, item))
    cls.binnedcontigs_ref_1 = cls.mu.file_to_binned_contigs({
        'file_directory': cls.test_directory_path,
        'assembly_ref': cls.assembly_ref_1,
        'binned_contig_name': 'MyBins',
        'workspace_name': cls.wsName})['binned_contig_obj_ref']
    print('BinnedContigs1:' + cls.binnedcontigs_ref_1)
def parse_config(fn, section):
    """Parse INI file *fn* and return the options of *section* as a dict."""
    parser = ConfigParser()
    parser.read(fn)
    return {key: value for key, value in parser.items(section)}
#################### ### parse config ### #################### configfile = "xpmig.ini" cfg = ConfigParser() cfg.read(configfile) for mandatory_section in ("boxpair", "serialnbr", "instance", "site", "collect", "dir"): if not cfg.has_section(mandatory_section): sys.stderr("{} section missing in config file {}, exiting..".format( mandatory_section, configfile)) sys.exit(1) for name, value in cfg.items("boxpair"): boxpair_dict[name.upper()] = value.split(",") for name, value in cfg.items("serialnbr"): serialnbr_dict[name.upper()] = int(value) for name, value in cfg.items("instance"): instance_dict[name.upper()] = int(value) for name, value in cfg.items("site"): site_dict[name.upper()] = value for name, value in cfg.items("collect"): collectfile_dict[name.upper()] = value try:
def configure(ini): global runs, repeatevery, cutoff, delay, maxtime, logfilemax, outputmax, \ make, makefile, affinitymask try: parser = ConfigParser() parser.read(ini) # override default directory locations for k, v in parser.items('dirs'): dirs[k] = normpath(expandvars(expanduser(v))) for k, v in parser.items('filters'): filters[k] = v.split() filters['onlydirs'] = frozenset(filters['onlydirs']) for k, v in parser.items('alias'): alias[k] = v.split() make = frozenset(parser.get('build', 'make').split()) f = dirs['makefile'] if 'makefile' in dirs else defaultMakefile() makefile = normpath(expandvars(expanduser(f))) if f else None # compiler interpreter runtime location shell vars for k, v in parser.items('tools'): os.environ[k.upper()] = v commandlines.update(parser.items('commandlines')) for k, v in parser.items('testrange'): testrange[k] = v.split() for k, v in parser.items('testdata'): testdata[k] = v for k, v in parser.items('outputcheck'): outputcheck[k] = frozenset(v.split()) for k, v in parser.items('ndiff_outputcheck'): ndiff_outputcheck[k] = v # test specific shell vars default = {} for each in filters['onlydirs']: if parser.has_section(each): d = {} for k, v in parser.items(each): d[k.upper()] = v default[k.upper()] = '' testenv[each] = d testenv['default'] = default s = 'measure' if parser.has_section(s): for o in parser.options(s): if o in ('runs'): runs = parser.getint(s, o) elif o in ('repeatevery'): repeatevery = parser.getboolean(s, o) elif o in ('cutoff'): cutoff = parser.getint(s, o) elif o in ('delay'): delay = parser.getfloat(s, o) elif o in ('maxtime'): maxtime = parser.getint(s, o) elif o in ('logfilemax'): logfilemax = parser.getint(s, o) elif o in ('outputmax'): outputmax = parser.getint(s, o) elif o in ('affinitymask'): affinitymask = parser.getint(s, o) except (NoSectionError, NoOptionError), e: if logger: logger.debug(e) print e, 'in', realpath(ini) sys.exit(2)
class Configuration(object):
    """Configuration container: built-in defaults merged with an INI file,
    each section exposed as an attribute wrapping a ConfigurationSection."""

    # Built-in fall-back settings; the 'daemon' and 'acquisition' sections
    # are always present even without a configuration file.
    __default_settings__ = {
        'daemon': {
            'ip': '127.0.0.1',
        },
        'acquisition': {
            'ip': '127.0.0.1',
            'protocol': 'tcp',
            'port': '*',
            'sampling_rate': '20000',
            'dtype': 'int16',
            'mapping': '',
        },
    }

    def __init__(self, path=None):
        # Expose every default section as an attribute.
        for section_key, section_value in self.__default_settings__.items():
            section_value = ConfigurationSection(section_value)
            setattr(self, section_key, section_value)
        if path is not None:
            self.path = os.path.abspath(os.path.expanduser(path))
            # Parse base configuration
            self.parser = ConfigParser()
            # base_path = os.path.join(self.path, CONFIGURATION_FILES['base'])
            self.parser.read(self.path)
            # File sections replace the corresponding defaults wholesale.
            # NOTE(review): this writes into the *class-level*
            # __default_settings__ dict, so parsed sections leak across
            # instances -- confirm whether that sharing is intended.
            for section_key in self.parser.sections():
                section_value = self.parser.items(section_key)
                section_value = dict(section_value)
                section_value = ConfigurationSection(section_value)
                self.__default_settings__[section_key] = section_value
                setattr(self, section_key, section_value)
        # Parser hosts configuration
        # hosts_parser = HostsParser()
        # hosts_path = os.path.join(self.path, CONFIGURATION_FILES['hosts'])
        # hosts_parser.read(hosts_path)
        # self.update(hosts_parser.parameters)

    def __repr__(self):
        return str(self.__default_settings__)

    @property
    def sections(self):
        # Known section names (defaults plus any parsed from the file).
        return self.__default_settings__.keys()

    @property
    def options(self):
        # Map of section name -> option names of that section.
        options_list = dict([(section_key, getattr(self, section_key).options)
                             for section_key in self.sections])
        return options_list

    @property
    def values(self):
        # Map of section name -> option values of that section.
        values_list = dict([(section_key, getattr(self, section_key).values)
                            for section_key in self.sections])
        return values_list

    def update(self, parameters):
        # Merge extra parameters; dict values are wrapped as sections.
        for key, value in parameters.iteritems():
            if isinstance(value, dict):
                value = ConfigurationSection(value)
            setattr(self, key, value)
        return

    @property
    def nb_nodes(self):
        return len(self.__default_settings__) - 2  # As daemon and acquisition are special nodes
class MMTEngine(object):
    """An MMT translation engine: paths, injected components (LM, aligner,
    phrase table, Moses) and its persistent 'engine.ini' configuration."""

    injector_section = 'engine'
    # field name -> (description, accepted type(s), default); consumed by the
    # dependency injector that fills the matching self._<field> attributes.
    injectable_fields = {
        'lm_type': ('LM implementation',
                    (basestring, LanguageModel.available_types), None),
        'aligner_type': ('Aligner implementation',
                         (basestring, WordAligner.available_types), None),
        'enable_tag_projection': (
            'Enable Tag Projection, this may take some time during engine startup.',
            bool, False)
    }

    # Ordered pipeline steps used when training an engine.
    training_steps = ['tm_cleanup', 'preprocess', 'context_analyzer', 'lm', 'tm']

    @staticmethod
    def list():
        # All engines found on disk, sorted by name.
        return sorted([MMTEngine(name=name)
                       for name in os.listdir(scripts.ENGINES_DIR)
                       if os.path.isdir(os.path.join(scripts.ENGINES_DIR, name))],
                      key=lambda x: x.name)

    def __init__(self, langs=None, name=None):
        # langs: optional (source_lang, target_lang) pair; name defaults to
        # 'default'. Only paths are computed here -- components are created
        # later in _on_fields_injected().
        self.name = name if name is not None else 'default'
        self.source_lang = langs[0] if langs is not None else None
        self.target_lang = langs[1] if langs is not None else None

        self._lm_type = None  # Injected
        self._aligner_type = None  # Injected
        self._enable_tag_projection = None  # Injected

        self._config = None

        # On-disk layout of the engine and its models.
        self.path = os.path.join(scripts.ENGINES_DIR, self.name)
        self.data_path = os.path.join(self.path, 'data')
        self.models_path = os.path.join(self.path, 'models')
        self._config_file = os.path.join(self.path, 'engine.ini')
        self._pt_model = os.path.join(self.models_path, 'phrase_tables')
        self._lm_model = os.path.join(self.models_path, 'lm', 'target.lm')
        self._context_index = os.path.join(self.models_path, 'context', 'index')
        self._moses_ini_file = os.path.join(self.models_path, 'moses.ini')

        # Per-engine runtime area (logs, temp files).
        self._runtime_path = os.path.join(scripts.RUNTIME_DIR, self.name)
        self._logs_path = os.path.join(self._runtime_path, 'logs')
        self._temp_path = os.path.join(self._runtime_path, 'tmp')

        self.builder = _MMTEngineBuilder(self)

        self._optimal_weights = None

    def exists(self):
        # An engine exists iff its 'engine.ini' has been written.
        return os.path.isfile(self._config_file)

    def _on_fields_injected(self, injector):
        # Called by the injector once the injectable fields are set: resolves
        # languages/defaults and wires up all pipeline components.
        if self.target_lang is None or self.source_lang is None:
            config = self.config
            if config is not None:
                self.target_lang = config.get(self.injector_section, 'target_lang')
                self.source_lang = config.get(self.injector_section, 'source_lang')

        if self.target_lang is None or self.source_lang is None:
            raise IllegalStateException(
                'Engine target language or source language must be specified')

        # Fall back to the first available implementation when none injected.
        if self._lm_type is None:
            self._lm_type = LanguageModel.available_types[0]
        if self._aligner_type is None:
            self._aligner_type = WordAligner.available_types[0]

        self.training_preprocessor = TrainingPreprocessor()
        self.preprocessor = Preprocessor()
        self.analyzer = injector.inject(ContextAnalyzer(self._context_index))
        self.cleaner = TMCleaner()
        self.pt = injector.inject(
            SuffixArraysPhraseTable(self._pt_model,
                                    (self.source_lang, self.target_lang)))
        self.aligner = injector.inject(
            WordAligner.instantiate(self._aligner_type))
        self.lm = injector.inject(
            LanguageModel.instantiate(self._lm_type, self._lm_model))

        # Assemble the Moses decoder feature set; names after the feature are
        # the identifiers used in moses.ini and in _optimal_weights below.
        self.moses = injector.inject(Moses(self._moses_ini_file))
        self.moses.add_feature(MosesFeature('UnknownWordPenalty'))
        self.moses.add_feature(MosesFeature('WordPenalty'))
        self.moses.add_feature(MosesFeature('Distortion'))
        self.moses.add_feature(MosesFeature('PhrasePenalty'))
        self.moses.add_feature(self.pt, 'PT0')
        self.moses.add_feature(LexicalReordering(), 'DM0')
        self.moses.add_feature(self.lm, 'MuxLM')

        # Pre-tuned decoder weights written to the [weights] section of
        # engine.ini (see write_engine_config).
        self._optimal_weights = {
            'MuxLM': [0.0996981],
            'DM0': [0.0940416, 0.0324946, 0.0884611, 0.0543363, 0.0258349,
                    0.107731, 0.102477, 0.0989888],
            'Distortion0': [8.84199E-4],
            'WordPenalty0': [-0.118122],
            'PhrasePenalty0': [6.17961E-4],
            'PT0': [0.0024974, 0.0102446, 0.0600407, 0.0316664, 0.071863],
        }

        if self._config is None:
            self._config = injector.to_config()
            self._config.set(self.injector_section, 'source_lang', self.source_lang)
            self._config.set(self.injector_section, 'target_lang', self.target_lang)

    @property
    def config(self):
        # Lazily load engine.ini; returns None when the engine was never saved.
        if self._config is None and os.path.isfile(self._config_file):
            self._config = ConfigParser()
            self._config.read(self._config_file)
        return self._config

    def write_configs(self):
        # Persist both moses.ini and engine.ini.
        self.moses.create_ini()
        self.write_engine_config()

    def write_engine_config(self):
        # Serialize the injected configuration (and optimal weights, unless a
        # [weights] section already exists) into engine.ini.
        with open(self._config_file, 'wb') as out:
            out.write("[%s]\n" % self.injector_section)
            for (key, value) in self._config.items(self.injector_section):
                if value is not None:
                    # Continuation lines are indented so multi-line values
                    # survive a ConfigParser round-trip.
                    key = " = ".join((key, str(value).replace('\n', '\n\t')))
                    out.write("%s\n" % key)
            out.write("\n")

            if self._optimal_weights is not None and len(
                    self._optimal_weights
            ) > 0 and not 'weights' in self._config.sections():
                out.write('[weights]\n')
                for name, weights in self._optimal_weights.iteritems():
                    out.write(name)
                    out.write(' = ')
                    out.write(' '.join([str(w) for w in weights]))
                    out.write('\n')

    def get_logfile(self, name, ensure=True):
        # Path of '<runtime>/logs/<name>.log'; with ensure=True the logs dir
        # is created and any previous logfile removed.
        if ensure and not os.path.isdir(self._logs_path):
            fileutils.makedirs(self._logs_path, exist_ok=True)

        logfile = os.path.join(self._logs_path, name + '.log')

        if ensure and os.path.isfile(logfile):
            os.remove(logfile)

        return logfile

    def get_runtime_path(self):
        return self._runtime_path

    def get_tempdir(self, name, ensure=True):
        # Path of '<runtime>/tmp/<name>'; with ensure=True it is recreated
        # empty.
        if ensure and not os.path.isdir(self._temp_path):
            fileutils.makedirs(self._temp_path, exist_ok=True)

        folder = os.path.join(self._temp_path, name)

        if ensure:
            shutil.rmtree(folder, ignore_errors=True)
            os.makedirs(folder)

        return folder

    def clear_tempdir(self, subdir=None):
        # Remove one temp subdirectory, or the whole temp area when
        # subdir is None.
        path = os.path.join(self._temp_path, subdir) if subdir is not None \
            else self._temp_path
        shutil.rmtree(path, ignore_errors=True)
def readconfig(file='config.conf', inputs=None, conversion_table=None): """Reading config file. There is a 'global' wetterturnier backend config file which is necessary to handle all the actions. This method also checks some parameters. E.g., if a required directory or file does not exist, the script stops. Args: file (:obj:`str`): string File name of the config file. Default is ``config.conf``. inputs (:obj:`dict`): Usually the input dict from :meth:`utils.inputcheck`. Default is None. If it is a dict: all parameters will be added to the config dict which will be generated in this method. In case a key exists in the config dict (created in here) and is duplicated in the inputs dict the script will stop immediately. Returns: dict: A dict containing all necessary configs. """ import sys, os import utils if not os.path.isfile(file): utils.exit('Cannot read file %s. Not readable or not existing' % file) # - Import ConfigParser from ConfigParser import ConfigParser CNF = ConfigParser() CNF.read(file) # - Checks if directory exists. def check_directory(name): if not os.path.isdir(name): utils.exit( 'Directory %s does not exist as requried by config file.' % name) # - Checks if file exists. def check_file(name): if not os.path.isfile(name): utils.exit('File %s does not exist as requried by config file.' 
% name) # ---------------------------------------------------------------- # - Reading mysql config config = {} config['conversion_table'] = conversion_table try: config['mysql_host'] = CNF.get('database', 'mysql_host') config['mysql_user'] = CNF.get('database', 'mysql_user') config['mysql_pass'] = CNF.get('database', 'mysql_pass') config['mysql_db'] = CNF.get('database', 'mysql_db') config['mysql_prefix'] = CNF.get('database', 'mysql_prefix') config['mysql_obstable'] = CNF.get('database', 'mysql_obstable') except: utils.exit( 'Problems reading the database config from the config file %s' % file) # ---------------------------------------------------------------- # - Reading migration flags try: config['migrate_groups'] = CNF.getboolean('migrate', 'groups') config['migrate_mitspieler'] = CNF.getboolean('migrate', 'mitspieler') config['migrate_mitspielerfile'] = CNF.get('migrate', 'mitspielerfile') config['migrate_wpconfig'] = CNF.get('migrate', 'wpconfig') tmp = CNF.get('migrate', 'citytags') config['migrate_citytags'] = [] for elem in tmp.split(','): config['migrate_citytags'].append(elem.strip()) except: config['migrate_mitspieler'] = False config['migrate_groups'] = False config['migrate_citytags'] = None # ---------------------------------------------------------------- # - datelock if set try: config['datelock'] = CNF.get('migrate', 'datelock') except: config['datelock'] = None # - Whether the system is allowed to create users or not try: config['allow_create_users'] = CNF.getboolean('migrate', 'allow_create_users') except: config['allow_create_users'] = False # ---------------------------------------------------------------- # - Loading operational and test judgingclass try: config['judging_operational'] = CNF.get('judging', 'operational') config['judging_test'] = CNF.get('judging', 'test') except: utils.exit('Problems reading necessary judging config!') # ---------------------------------------------------------------- # - Some configs where the data are. 
# data_moses: where Klaus Knuepffer stores the moses equations try: config['data_moses'] = CNF.get('data', 'moses') except: utils.exit('Problems rading all required data infos from config file') if not os.path.isdir(config['data_moses']): print "[WARNING] Could not find directory %s necessary for ComputeMoses" % config[ 'data_moses'] print " ComputeMoes will crash!" try: config['data_moses_out'] = CNF.get('data', 'moses_out') # If folder does not exist: ignore if not os.path.isdir(config['data_moses_out']): print "[WARNING] Output directory for moses (moses_out=\"{0:s}\")".format( config['data_moses_out']) print " does not exist, ignore!" config['data_moses_out'] = None except: utils.exit( 'No [data] modes_out directory set, will not copy files to webserver.' ) config['data_moses_out'] = None # ---------------------------------------------------------------- # - The rawdir is used by archive.py to import old # wetterturnier data. Should never be used in the final version. try: config['rawdir'] = CNF.get('system', 'rawdir') except: utils.exit( 'Problems rading all required system infos from config file') # ---------------------------------------------------------------- # - Reading all stations tmp = CNF.items('stations') stn = {} for elem in tmp: stn[elem[0]] = int(elem[1]) config['stations'] = stn # ---------------------------------------------------------------- # - Adding inputs if set if not inputs == None: for k in inputs.keys(): # - Duplicated? if k in config.keys(): utils.exit("inputs dict contains keys which are generated in this " + \ "method as well. Duplication! Exit. Key is: %s" % k) # - Else append if inputs[k] == 'None': config[k] = None else: config[k] = inputs[k] return config
class MailboxFactory:
    """ MailboxFactory is a factory class for Mailbox objects. You can define
        mailboxes of different types in an INI-style config file (the file has
        to parsable by ConfigParser.ConfigParser; the exceptions defined in
        ConfigParser may be thrown if the config file is not well-formed.)
        Each section in the config file describes one mailbox.

        An example of a valid config file 'mailboxes.cfg' is the following:

        [Standard]
        type = IMAP
        mailbox = INBOX
        server = mail.physik.fu-berlin.de
        username = goerz
        password = secret
        ssl = True
        port = 933

        [Sent]
        type = IMAP
        mailbox = Sent
        server = mail.physik.fu-berlin.de
        username = goerz
        password = secret

        [Backup]
        type = mbox
        path = /home/goerz/Mail/backup.mbox

        The type of the mailbox is described by the 'type' parameters. The
        types known by default are 'imap', 'mbox', 'maildir', 'MH', 'Babyl',
        and 'MMDF', all of which have corresponding subclasses of
        mailbox.Mailbox (all except ImapMailbox are defined in the standard
        library). The type specification is not case sensitive.

        The remaining parameters in a specific section depend on the type. The
        Mailbox classes from the standard library need only a path; IMAP needs
        type, mailbox, server, username, and password. The ssl and port
        parameters are optional. ssl is enabled by default; the port, if
        unspecified, is the standard port (933 for ssl, 433 otherwise).

        MailboxFactory has capabilities to extend the set of known types by
        using the set_type method.

        The MailboxFactory partly supports a read-only dictionary interface.
    """

    def __init__(self, configfilename):
        """ Initialize MailboxFactory files. The mailbox objects that can be
            generated must be described in configfilename.
        """
        # Registry: lower-cased type name -> (factory class, path generator).
        self._types = {}
        # Register the built-in mailbox types; more can be added via set_type.
        self.set_type('mbox', mailbox.mbox, standard_pathgenerator)
        self.set_type('maildir', mailbox.Maildir, standard_pathgenerator)
        self.set_type('mh', mailbox.MH, standard_pathgenerator)
        self.set_type('babyl', mailbox.Babyl, standard_pathgenerator)
        self.set_type('mmdf', mailbox.MMDF, standard_pathgenerator)
        self.set_type('imap', ImapMailbox, imap_pathgenerator)
        self._configparser = ConfigParser()
        self._configparser.read(configfilename)

    def get(self, name):
        """ Create the Mailbox object that is described in section 'name' in
            the config file.
            For example,
            >>> mailboxes = MailboxFactory("mailboxes.cfg")
            >>> mb = mailboxes.get('Standard')
            mb would now be an object of type ImapMailbox if mailboxes.cfg
            contained the data as the example in the class docstring.
        """
        mailboxtype = self._configparser.get(name, 'type').lower()
        if not mailboxtype in self._types.keys():
            raise UnknownMailboxTypeError, "Unknown type: %s" % mailboxtype
        factory, pathgenerator = self._types[mailboxtype]
        # The path generator turns the section's options into whatever 'path'
        # object the factory expects (string, tuple, ...).
        path = pathgenerator(dict(self._configparser.items(name)))
        return (factory(path))

    def __getitem__(self, name):
        """ Shorthand for the get method. For example,
            >>> mailboxes = MailboxFactory("mailboxes.cfg")
            >>> mb = mailboxes['Standard']
        """
        return self.get(name)

    def get_server(self, name):
        """ Return an ImapServer instance from the server data that is
            described in section 'name'. The section must have the form of an
            imap mailbox (as described above). A TypeError will be raised if
            the section is not of type IMAP. The 'mailbox' key is ignored.
            For example, you could create an ImapServer like this:
            >>> mailboxes = MailboxFactory("mailboxes.cfg")
            >>> server = mailboxes.get_server('StandardServer')
        """
        mailboxtype = self._configparser.get(name, 'type').lower()
        if mailboxtype != 'imap':
            raise TypeError, "You can only create a server from an IMAP mailbox"
        factory, pathgenerator = self._types[mailboxtype]
        # For IMAP the generated path is a (server, name) tuple; element 0 is
        # the server object.
        path = pathgenerator(dict(self._configparser.items(name)))
        return (path[0])

    def __contains__(self, name):
        """ Return True if there is a mailbox with the given name,
            False otherwise
        """
        return (name in self._configparser.sections())

    def list(self):
        """ List all mailboxes defined in the config file """
        return self._configparser.sections()

    def data(self, name):
        """ List all the data associated with the mailbox name """
        return self._configparser.items(name)

    def set_type(self, typename, factory, pathgenerator):
        """ Make a new typename of Mailbox known. This allows you to handle
            new types of Mailbox objects beyond IMAP and the mailboxes of the
            standard library.
            factory is the class that generates the Mailbox object and must be
            a subclass of mailbox.Mailbox
            pathgenerator is a callable that receives a dict of options set in
            a section of the config file, and returns the 'path' that is
            passed as the first argument to the factory. For the standard
            mailboxes of the standard library, the 'path' is just a string,
            the path of the mailbox in the filesystem. For IMAP, the path is a
            tuple (server, name). For new types, this may be anything.
            For example the constructor of this class makes the 'mbox' type
            known as:
            self.set_type('mbox', mailbox.mbox, standard_pathgenerator)
            In combination,
            factory(pathgenerator(dict_of_options_in_configfile_section))
            should create a Mailbox object of the appropriate type.
        """
        if not issubclass(factory, mailbox.Mailbox):
            raise FactoryIsNotMailboxTypeError
        if not callable(pathgenerator):
            raise PathgeneratorNotCallableError
        self._types[str(typename).lower()] = (factory, pathgenerator)
#!/usr/bin/env python
"""Package setup; most metadata is read from setup.cfg."""
import glob
import os

from setuptools import find_packages, setup

# Get some values from the setup.cfg
# (ConfigParser was renamed to configparser in Python 3).
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))

# Metadata, falling back to the cookiecutter template placeholders.
_get = metadata.get
PACKAGENAME = _get('package_name', 'src')
DESCRIPTION = _get('description', '{{cookiecutter.short_description}}')
AUTHOR = _get('author', '{{cookiecutter.author_name}}')
AUTHOR_EMAIL = _get('author_email', '{{cookiecutter.author_email}}')
LICENSE = _get('license', 'unknown')
VERSION = _get('version', '0.0.0dev')
URL = _get('url', '')

# Long description: an explicit value in setup.cfg wins.
readme_glob = 'README*'
_cfg_long_description = _get('long_description', '')
if _cfg_long_description:
    LONG_DESCRIPTION = _cfg_long_description
# Make a sibling 'astropy_helpers' checkout importable, when one exists.
a_h_path = os.path.abspath(os.path.join('..', 'astropy_helpers'))
if os.path.isdir(a_h_path):
    sys.path.insert(1, a_h_path)

# Load all of the global Astropy configuration
from astropy_helpers.sphinx.conf import *

# Get configuration information from setup.cfg
# Get some values from the setup.cfg
# (ConfigParser was renamed to configparser in Python 3.)
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser
conf = ConfigParser()

# setup.cfg lives one directory above this Sphinx conf.py.
conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))

# -- General configuration ----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'

# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("x.y.z")` here.
# check_sphinx_version("1.2.1")

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns.append('_templates')

# This is added to the end of RST files - a good place to put substitutions to
def configure(self):
    """Build the effective option set: hard-coded defaults, overridden by an
    optional configuration file, overridden in turn by command-line flags.

    Returns:
        The argparse namespace holding the final settings.
    """
    # Hard-coded fall-back values.
    defaults = {}
    defaults["message"] = "UNCLASSIFIED"
    defaults["fgcolor"] = "#FFFFFF"
    defaults["bgcolor"] = "#007A33"
    defaults["face"] = "liberation-sans"
    defaults["size"] = "small"
    defaults["weight"] = "bold"
    defaults["show_top"] = True
    defaults["show_bottom"] = True
    defaults["hres"] = 0
    defaults["vres"] = 0
    defaults["sys_info"] = False
    defaults["opacity"] = 0.75
    defaults["esc"] = True
    defaults["spanning"] = False

    # Check if a configuration file was passed in from the command line
    default_heading = DEFAULTSECT
    conf_parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
                                 add_help=False)
    conf_parser.add_argument("-c", "--config",
                             help="Specify the configuration file",
                             metavar="FILE")
    conf_parser.add_argument("--heading",
                             help="Specify the config. section to use.",
                             default=default_heading)
    options, args = conf_parser.parse_known_args()

    config_file = None
    if options.config:
        config_file = os.path.abspath(options.config)
        if not os.path.isfile(config_file):
            print("ERROR: Specified configuration file does not exist.")
            sys.exit(1)
    else:
        config_file = os.path.abspath(CONF_FILE)
        if not os.path.isfile(config_file):
            config_file = None

    # In order to maintain backwards compatibility with the way the
    # previous configuration file format, a dummy section may need
    # to be added to the configuration file. If this is the case,
    # a temporary file is used in order to avoid overwriting the
    # user's configuration.
    config = ConfigParser()
    if config_file is not None:
        fp = open(config_file, "r")
        while True:
            try:
                # BUGFIX: compare the interpreter version with '==', not
                # 'is' -- identity comparison of small ints is a CPython
                # implementation detail, not a guarantee.
                if python.major == 3:
                    config.read_file(fp, source=config_file)
                else:
                    config.readfp(fp, config_file)
                break
            except MissingSectionHeaderError:
                # Recreate the file, adding a default section.
                fp.close()
                fp = TemporaryFile()
                with open(config_file) as original:
                    fp.write("[%s]\n" % default_heading + original.read())
                fp.seek(0)
        fp.close()  # If this was a temporary file it will now be deleted.

    # ConfigParser treats everything as strings and any quotation
    # marks in a setting are explicitly added to the string.
    # One way to fix this is to add everything to the defaults and
    # then strip the quotation marks off of everything.
    defaults.update(dict(config.items(options.heading)))
    for key, val in defaults.items():
        if config.has_option(options.heading, key):
            defaults[key] = val.strip("\"'")

    # TODO: This coercion section is hacky and should be fixed.
    for key in ["show_top", "show_bottom", "sys_info", "esc", "spanning"]:
        if config.has_option(options.heading, key):
            defaults[key] = config.getboolean(options.heading, key)
    for key in ["hres", "vres"]:
        if config.has_option(options.heading, key):
            defaults[key] = config.getint(options.heading, key)
    for key in ["opacity"]:
        if config.has_option(options.heading, key):
            defaults[key] = config.getfloat(options.heading, key)

    # Use the global config to set defaults for command line options
    parser = ArgumentParser(parents=[conf_parser])
    parser.add_argument("-m", "--message", default=defaults["message"],
                        help="Set the Classification message")
    parser.add_argument("-f", "--fgcolor", default=defaults["fgcolor"],
                        help="Set the Foreground (text) color")
    parser.add_argument("-b", "--bgcolor", default=defaults["bgcolor"],
                        help="Set the Background color")
    parser.add_argument("-x", "--hres", default=defaults["hres"], type=int,
                        help="Set the Horizontal Screen Resolution")
    parser.add_argument("-y", "--vres", default=defaults["vres"], type=int,
                        help="Set the Vertical Screen Resolution")
    parser.add_argument("-o", "--opacity", default=defaults["opacity"],
                        type=float, dest="opacity",
                        help="Set the window opacity for composted window managers")
    parser.add_argument("--face", default=defaults["face"], help="Font face")
    parser.add_argument("--size", default=defaults["size"], help="Font size")
    parser.add_argument("--weight", default=defaults["weight"],
                        help="Set the Font weight")
    parser.add_argument("--disable-esc-msg", default=defaults["esc"],
                        dest="esc", action="store_false",
                        help="Disable the 'ESC to hide' message")
    parser.add_argument("--hide-top", default=defaults["show_top"],
                        dest="show_top", action="store_false",
                        help="Disable the top banner")
    parser.add_argument("--hide-bottom", default=defaults["show_bottom"],
                        dest="show_bottom", action="store_false",
                        help="Disable the bottom banner")
    parser.add_argument("--system-info", default=defaults["sys_info"],
                        dest="sys_info", action="store_true",
                        help="Show user and hostname in the top banner")
    parser.add_argument("--enable-spanning", default=defaults["spanning"],
                        dest="spanning", action="store_true",
                        help="Enable banner(s) to span across screens as a single banner")
    options = parser.parse_args()
    return options
builtins._ASTROPY_SETUP_ = True

from astropy_helpers.setup_helpers import (register_commands, get_debug_option,
                                           get_package_info)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py

# Get some values from the setup.cfg
# (the ConfigParser module was renamed to configparser in Python 3).
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))

# Package metadata, with sensible fall-backs when setup.cfg omits a key.
PACKAGENAME = metadata.get('package_name', 'mosviz')
DESCRIPTION = metadata.get('description', 'MOS visualization tool')
AUTHOR = metadata.get('author', 'JDADF Developers')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'https://github.com/spacetelescope/mosviz')

# order of priority for long_description:
#   (1) set in setup.cfg,
#   (2) load LONG_DESCRIPTION.rst,
#   (3) load README.rst,
#   (4) package docstring
readme_glob = 'README*'
_cfg_long_description = metadata.get('long_description', '')