def rescan_main():
    opts = parse_args()
    try:
        config = RawConfigParser()
        config.read(opts.config_file)
        config.options("master-public-keys")
    except NoSectionError:
        print("ERROR: Non-existent configuration file {}".format(
            opts.config_file))
        return
    logger = logging.getLogger('ELECTRUMPERSONALSERVER')
    logger, logfilename = logger_config(logger, config)
    logger.info('Starting Electrum Personal Server rescan script')
    logger.info('Logging to ' + logfilename)
    logger.warning("The separate rescan script is deprecated, use " +
                   "`electrum-personal-server --rescan` instead.")
    try:
        rpc_u = config.get("bitcoin-rpc", "rpc_user")
        rpc_p = config.get("bitcoin-rpc", "rpc_password")
    except NoOptionError:
        rpc_u, rpc_p = obtain_rpc_username_password(config.get(
            "bitcoin-rpc", "datadir"))
    if rpc_u is None:
        return
    rpc = JsonRpc(host=config.get("bitcoin-rpc", "host"),
                  port=int(config.get("bitcoin-rpc", "port")),
                  user=rpc_u, password=rpc_p,
                  wallet_filename=config.get(
                      "bitcoin-rpc", "wallet_filename").strip())
    rescan_script(logger, rpc)
def _has_required_metadata(setup_cfg):
    config = RawConfigParser()
    config.read([setup_cfg], encoding="utf8")
    return (
        config.has_section("metadata")
        and "name" in config.options("metadata")
        and "version" in config.options("metadata")
    )
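# A minimal usage sketch for _has_required_metadata() above. The sample
# setup.cfg content and the temporary-file handling are illustrative only,
# not part of the original snippet.
import tempfile

def _demo_has_required_metadata():
    sample = "[metadata]\nname = example-pkg\nversion = 0.1.0\n"
    with tempfile.NamedTemporaryFile("w", suffix=".cfg", delete=False) as f:
        f.write(sample)
        path = f.name
    # Expect True: both "name" and "version" are present under [metadata].
    return _has_required_metadata(path)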
class Parser(object):
    _DEFAULT_CONFIG_PATH = './config.ini'

    def __init__(self, path=_DEFAULT_CONFIG_PATH):
        self._parser = RawConfigParser()
        self._parser.read(path)

    def parse_tasks(self):
        tasks = []
        for sect in self._parser.sections():
            if not sect.startswith('task'):
                continue
            date = self._parser.getint(sect, "date")
            sess = self._parser.getint(sect, "session")
            tasks.append(Task(date, sess))
        return tasks

    def parse_config(self, tag="client"):
        keys = self._parser.options(tag)
        config = dict()
        for k in keys:
            raw = self._parser.get(tag, k)
            try:
                raw = float(raw)
            except ValueError:
                # keep non-numeric values as strings
                pass
            finally:
                config[k] = raw
        return config
def read_raw_parser(self, filename):
    blocks_set = []
    file_parser = RawConfigParser()

    if os.getenv("PINGUINO_PYTHON") == "2":
        if type(filename) in [str, unicode]:
            file_parser.readfp(codecs.open(filename, "r", encoding="utf-8"))
        else:
            file_parser = filename
    elif os.getenv("PINGUINO_PYTHON") == "3":
        if type(filename) == str:
            # read_file() replaces the Python 3-deprecated readfp()
            file_parser.read_file(codecs.open(filename, "r", encoding="utf-8"))
        else:
            file_parser = filename

    sections = file_parser.sections()
    for section in sections:
        options = file_parser.options(section)
        block = {}
        for option in options:
            value = file_parser.get(section, option)
            if os.getenv("PINGUINO_PYTHON") == "2":
                if (type(value) in [str, unicode]) and (value[0] in ["[", "("]):
                    block[option] = eval(value)
                else:
                    block[option] = value
            elif os.getenv("PINGUINO_PYTHON") == "3":
                if (type(value) == str) and (value[0] in ["[", "("]):
                    block[option] = eval(value)
                else:
                    block[option] = value
        blocks_set.append(block)
    return blocks_set
def read_merged(self, filenames, encoding=None): cfg = [] for filename in filenames: _cfg = RawConfigParser() _cfg.read(filename) cfg.append(_cfg) for _cfg in cfg: for section in _cfg.sections(): if not self.has_section(section): self.add_section(section) for option in _cfg.options(section): value = _cfg.get(section, option) if ";" in value: current = self.getdefault(section, option, "") if ";" in current: val = [] for v in value.split(";"): if v and v not in val: val.append(v) for v in self.getlist(section, option): if v and v not in val: val.append(v) self.set(section, option, ";".join(val) + ";") continue self.set(section, option, value)
def read_config_file(self): """Read in the configuration file and return Configuration objects for it and the config_source. """ config = Configuration() config_source = Configuration() if self.config_file.exists(): raw_config = RawConfigParser() raw_config.read(str(self.config_file)) # Iterate over the config file options and write them into config for section in raw_config.sections(): for option in raw_config.options(section): value = raw_config.get(section, option) # Coerce values into useful datatypes if value.lower() in ['yes', 'true', 'on']: value = True elif value.lower() in ['no', 'false', 'off']: value = False elif value.lower() in ['none']: continue elif value.replace('.', '').isdigit(): if '.' in value: value = Decimal(value) else: value = int(value) config[section][option] = value config_source[section][option] = 'config_file' return config, config_source
def populate_config_dict(config_path): """Load the configuration file into the config_file dictionary A ConfigParser-style configuration file can have multiple sections, but we ignore the section distinction and load the key/value pairs from all sections into a single key/value list. """ try: config_dict = {} parser = RawConfigParser() parser.optionxform = lambda x: x parser.read(config_path) sections = parser.sections() for section in sections: options = parser.options(section) for option in options: config_dict[option] = str(parser.get(section, option)) except Exception as e: logger.warning("Could not load configuration file due to exception. " "Only environment variable equivalents will be used.") return None for key in config_dict.keys(): if config_dict[key] == '': config_dict[key] = None elif isinstance(config_dict[key], str): config_dict[key] = os.path.expanduser(config_dict[key]) return config_dict
def read_config_file(filename): """ Reads a configuration file to modify the global settings. :param filename: cfg file pathname, read through os.path.normpath """ global LOG_FORMAT, LOG_FILENAME, STATE_REGEX, ZIP_REGEX # Config parser object, load settings for global variables config = RawConfigParser() config.read(path.normpath(filename)) # Sections should be "log" and "validators" for section in config.sections(): # Options for log: format, output # Options for validators: zip_code, state for option in config.options(section): if section == "log" and option == "format": LOG_FORMAT = config.get(section, option) elif section == "log" and option == "output": LOG_FILENAME = config.get(section, option) elif section == "validators" and option == "state": STATE_REGEX = compile(config.get(section, option)) elif section == "validators" and option == "zip_code": ZIP_REGEX = compile(config.get(section, option))
def read_config_info(ini_file, debug):
    """
    Read the INI file

    Args:
        ini_file - path to the file

    Returns:
        A dictionary of stuff from the INI file

    Exits:
        1 - if problems are encountered
    """
    try:
        config = RawConfigParser()
        config.optionxform = lambda option: option
        config.read(ini_file)
        the_stuff = {}
        for section in config.sections():
            the_stuff[str(section)] = {}
            for option in config.options(section):
                the_stuff[str(section)][str(option)] = str(
                    config.get(section, option.replace('\n', '')))

        if debug:
            print('ini data: ' + str(the_stuff))

        return the_stuff
    except Exception as wtf:
        logging.error('Exception caught in read_config_info(): {}'.format(wtf))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
def read_config_info(self, ini_file):
    """
    Read the INI file

    Args:
        ini_file - path to the file

    Returns:
        A dictionary of stuff from the INI file

    Exits:
        1 - if problems are encountered
    """
    try:
        config = RawConfigParser()
        config.optionxform = lambda option: option
        config.read(ini_file)
        the_stuff = {}
        for section in config.sections():
            the_stuff[section] = {}
            for option in config.options(section):
                the_stuff[section][option] = config.get(section, option)

        return the_stuff
    except Exception as wtf:
        logging.error(
            'Exception caught in read_config_info(): {}'.format(wtf))
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)
def get_tornado_ports(config_file: configparser.RawConfigParser) -> List[int]:
    ports = []
    if config_file.has_section("tornado_sharding"):
        ports = [int(port) for port in config_file.options("tornado_sharding")]
    if not ports:
        ports = [9800]
    return ports
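# A minimal usage sketch for get_tornado_ports() above. The section layout
# (one option per port under [tornado_sharding]) follows the code; the sample
# port numbers and socket paths are illustrative only.
import configparser

def _demo_get_tornado_ports():
    cfg = configparser.RawConfigParser()
    cfg.read_string("[tornado_sharding]\n9801 = a.sock\n9802 = b.sock\n")
    return get_tornado_ports(cfg)  # -> [9801, 9802]; [9800] if section missing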
def load(paths=()): """Load configuration from files. ``paths`` a list of the format ``[(PATH, IGNORE_IF_MISSING), ...]``. """ configuration = Configuration(DEFAULT_CONFIG_SCHEMA) for path, ignore_if_missing in paths: parser = RawConfigParser() config_source = "config file %r" % path try: if not parser.read(path): config = Configuration.SOURCE_MISSING if not ignore_if_missing: raise RuntimeError("No such file: %r" % path) else: config = { s: {o: parser[s][o] for o in parser.options(s)} for s in parser.sections() } except Exception as e: raise RuntimeError("Failed to load %s: %s" % (config_source, e)) from e configuration.update(config, config_source, internal=False) return configuration
class ConfigHandler:
    def __init__(self, file_name):
        self.conf = RawConfigParser()
        self.file_name = os.path.join(conf_loc, file_name)
        self.conf.read(self.file_name, encoding='utf-8')

    def get_sections(self):
        """Return all sections."""
        return self.conf.sections()

    def get_options(self, section):
        """Return all options under the given section."""
        return self.conf.options(section)

    def read(self, section, option):
        """Read the value of option under section."""
        return self.conf.get(section, option)

    def write(self, section, option, value):
        """1. If the section does not exist, add it.
        2. If the section and the key exist, update the value.
        3. If the key does not exist, add it and assign the value.
        """
        if not self.conf.has_section(section):
            self.conf.add_section(section)
        self.conf.set(section, option, value)
        with open(self.file_name, 'w', encoding='utf-8') as f:
            self.conf.write(f)

    def return_data(self, section, option):
        """Return the stored value converted back to its original Python type."""
        result = self.read(section, option)
        return eval(result)
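# A brief usage sketch for ConfigHandler above. `conf_loc` is a module-level
# directory defined elsewhere in the original code; the file name and the
# section/option values here are illustrative only.
def _demo_config_handler():
    handler = ConfigHandler('demo.ini')          # resolves to conf_loc/demo.ini
    handler.write('db', 'port', '5432')          # creates [db] if missing
    assert handler.read('db', 'port') == '5432'  # values come back as strings
    return handler.return_data('db', 'port')     # eval() turns '5432' into 5432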
def _build_config_from_file(self): """ Handles building config dict from the standard config file, typically `config.ini`. :returns: The config object from the default config file :rtype: dict """ config_file = path.join(self._get_current_dir(), "config.ini") if not path.exists(config_file): message = ( "Expected to see config file at {}. \n\n" "Copy `config.sample.ini` to `config.ini`, and update the required \n" "fields such as username, password, and hostname. \n" ).format(config_file) sys.stderr.write(message) sys.exit(1) # Uses the RawConfigParser to prevent interpolation parser = RawConfigParser() parser.read(config_file) # Convert configparser into regular dict config = {} for section in parser.sections(): config[section] = {} for opt in parser.options(section): config[section][opt] = parser.get(section, opt) return config
def read(self, app_name='Alignak-WebUI'): # pylint: disable=too-many-nested-blocks """ Read configuration file Tries to load a configuration from the following files: - /usr/local/etc/alignak_webui/settings.cfg (FreeBSD) - /etc/alignak_webui/settings.cfg (Debian) - ~/alignak_webui/settings.cfg (Current user home directory) - ./etc/settings.cfg (Current working directory etc sub directory) - ./settings.cfg (Current working directory) - cfg_file parameter This list of file is used by the Python ConfigParser to build the application configuration. The parameters found in the sections of the configuration are stored in the global ``settings`` dictionary. A variable named *var* in the section *section* is stored with the key *section.var* of the ``settings`` dictionary. As of it, parameters of the *ui* section are all prefixed with *ui.* ... Returns None if no configuration file could be found, else returns ConfigParser object :param app_name: application name (to build configuration file name) """ app_name = app_name.lower() settings_filenames = self.filenames if not isinstance(self.filenames, list): settings_filenames = [os.path.abspath(self.filenames)] try: config = RawConfigParser() found_cfg_file = config.read(settings_filenames) if found_cfg_file: # Build settings dictionnary for application parameters for section in config.sections(): for option in config.options(section): if app_name == section.lower(): self[option] = config.get(section, option) if self[option] in ['True', 'true']: self[option] = True if self[option] in ['False', 'false']: self[option] = False else: self[section + '.' + option] = config.get( section, option) if self[section + '.' + option] in ['True', 'true']: self[section + '.' + option] = True if self[section + '.' + option] in ['False', 'false']: self[section + '.' + option] = False return found_cfg_file except Exception as exp: # pragma: no cover - bad formated file print("Bad formed configuration file.") print("Exception: %s" % str(exp)) print("Traceback: %s" % traceback.format_exc()) return None
def load(paths=()): """ Create instance of ``Configuration`` for use with ``radicale.app.Application``. ``paths`` a list of configuration files with the format ``[(PATH, IGNORE_IF_MISSING), ...]``. If a configuration file is missing and IGNORE_IF_MISSING is set, the config is set to ``Configuration.SOURCE_MISSING``. The configuration can later be changed with ``Configuration.update()``. """ configuration = Configuration(DEFAULT_CONFIG_SCHEMA) for path, ignore_if_missing in paths: parser = RawConfigParser() config_source = "config file %r" % path try: if not parser.read(path): config = Configuration.SOURCE_MISSING if not ignore_if_missing: raise RuntimeError("No such file: %r" % path) else: config = { s: {o: parser[s][o] for o in parser.options(s)} for s in parser.sections() } except Exception as e: raise RuntimeError("Failed to load %s: %s" % (config_source, e)) from e configuration.update(config, config_source) return configuration
class ConfigManager(object): __package__ = 'applibs' def __init__(self, configFile): self.ConfigFile = configFile self.Config = RawConfigParser() self.Config.optionxform = str self.Config.read(configFile) def getValue(self, sectionName, propertyName): return self.Config.get(sectionName, propertyName) def getKeys(self, sectionName): return self.Config.options(sectionName) def getConfigMap(self): configMap = OrderedDict() for s in self.Config.sections(): sectionMap = {} for k in self.Config.options(s): sectionMap[k] = self.Config.get(s, k) configMap[s] = sectionMap return configMap def setValue(self, section, key, value): self.Config.set(section, key, value) file = open(self.ConfigFile, 'w') self.Config.write(file) file.close() def writeConfigMap(self, configMap): for s in configMap: self.Config.add_section(s) for k in configMap[s]: self.Config.set(s, k, configMap[s][k]) file = open(self.ConfigFile, 'w') self.Config.write(file) file.close() def save(self): file = open(self.ConfigFile, 'w') self.Config.write(file) file.close()
def set_proj_config(self): """ Sets some project config related attributes from the project's config file. """ proj_path = self.get_proj_path() if proj_path is None: raise ValueError('Project path must be set before setting project configuration') config_path = proj_path + '/config.cfg' proj_config = RawConfigParser(allow_no_value=True) proj_config.optionxform = str proj_config.read(config_path) self.ROI_DEFS = proj_config.options('ROI_DEFS') self.STIM_DEFS = proj_config.options('STIM_DEFS') self.CUSTOM_COLUMNS = proj_config.options('CUSTOM_COLUMNS')
def get_links_list(comic_info: RawConfigParser):
    link_list = []
    for option in comic_info.options("Links Bar"):
        link_list.append({
            "name": option,
            "url": path(comic_info.get("Links Bar", option))
        })
    return link_list
def get_pages_list(comic_info: RawConfigParser):
    page_list = []
    for option in comic_info.options("Pages"):
        page_list.append({
            "template_name": option,
            "title": path(comic_info.get("Pages", option))
        })
    return page_list
def _read_configuration_file(self, path): """Try to read and parse `path` as a configuration file. If the configurations were illegal (checked with `self._validate_options`), raises `IllegalConfiguration`. Returns (options, should_inherit). """ parser = RawConfigParser(inline_comment_prefixes=('#', ';')) options = None should_inherit = True if parser.read(path) and self._get_section_name(parser): all_options = self._parser.option_list[:] for group in self._parser.option_groups: all_options.extend(group.option_list) option_list = dict([(o.dest, o.type or o.action) for o in all_options]) # First, read the default values new_options, _ = self._parse_args([]) # Second, parse the configuration section_name = self._get_section_name(parser) for opt in parser.options(section_name): if opt == 'inherit': should_inherit = parser.getboolean(section_name, opt) continue if opt.replace('_', '-') not in self.CONFIG_FILE_OPTIONS: log.warning("Unknown option '{}' ignored".format(opt)) continue normalized_opt = opt.replace('-', '_') opt_type = option_list[normalized_opt] if opt_type in ('int', 'count'): value = parser.getint(section_name, opt) elif opt_type == 'string': value = parser.get(section_name, opt) else: assert opt_type in ('store_true', 'store_false') value = parser.getboolean(section_name, opt) setattr(new_options, normalized_opt, value) # Third, fix the set-options options = self._fix_set_options(new_options) if options is not None: if not self._validate_options(options): raise IllegalConfiguration('in file: {}'.format(path)) return options, should_inherit
def configuration():
    config = RawConfigParser()
    config.read(os.path.expanduser(configfile))
    res = {}
    # ConfigParser offers no public "return everything as a dict" helper, so
    # build one by hand (the undocumented and unreliable
    # ConfigParser._sections attribute doesn't count).
    for section in config.sections():
        res[section] = {}
        for option in config.options(section):
            res[section][option] = config.get(section, option)
    return res
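# An alternative sketch: on Python 3 the parser itself supports the mapping
# protocol, so the nested loops above can be collapsed. dict(config[section])
# also folds in keys from [DEFAULT], matching what options()/get() return,
# so the behaviour should be equivalent.
def configuration_via_mapping():
    config = RawConfigParser()
    config.read(os.path.expanduser(configfile))
    return {section: dict(config[section]) for section in config.sections()}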
def get_config(section):
    """ Returns a dict of options from the CONFIG_FILE for section """
    config_parser = RawConfigParser(interpolation=ExtendedInterpolation())
    config_parser.read(CONFIG_FILE)
    config = {}
    for option in config_parser.options(section):
        try:
            config[option] = config_parser.get(section, option)
        except KeyError:
            config[option] = None
    return config
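# A small sketch of what ExtendedInterpolation buys get_config() above:
# ${section:option} references are expanded at get() time. The sample
# configuration text is illustrative only.
def _demo_extended_interpolation():
    parser = RawConfigParser(interpolation=ExtendedInterpolation())
    parser.read_string(
        "[paths]\n"
        "base = /srv/app\n"
        "[app]\n"
        "logdir = ${paths:base}/logs\n"
    )
    return parser.get("app", "logdir")  # -> '/srv/app/logs'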
def getConfigContent(self): content = {} conf = RawConfigParser() conf.read(self.file_path) sections = conf.sections() for section in sections: options = conf.options(section) parametres = {} for option in options: value = conf.get(section, option) parametres.update({option: value}) content.update({section: parametres}) return content
def _read_configuration_file(self, path): """Try to read and parse `path` as a configuration file. If the configurations were illegal (checked with `self._validate_convention`), raises `IllegalConfiguration`. Returns (arguments, should_inherit). """ parser = RawConfigParser(inline_comment_prefixes=("#", ";")) arguments = None should_inherit = True if parser.read(path) and parser.has_section( ConfigurationParser.SECTION_NAME): all_arguments = self._parser._get_optional_actions() argument_list = {arg.dest: arg.type for arg in all_arguments} # First, read the default values new_arguments = self._parse_args([]) # Second, parse the configuration section_name = ConfigurationParser.SECTION_NAME for arg in parser.options(section_name): if arg == "inherit": should_inherit = parser.getboolean(section_name, arg) continue if arg.replace("_", "-") not in self.CONFIG_FILE_ARGUMENTS: log.warning("Unknown option '{}' ignored".format(arg)) continue normalized_arg = arg.replace("-", "_") arg_type = argument_list[normalized_arg] if arg_type is int: value = parser.getint(section_name, arg) elif arg_type == str: value = parser.get(section_name, arg) else: assert arg_type is bool value = parser.getboolean(section_name, arg) setattr(new_arguments, normalized_arg, value) # Third, fix the set-arguments arguments = self._fix_set_arguments(new_arguments) if arguments is not None: if not self._validate_convention(arguments): raise IllegalConfiguration("in file: {}".format(path)) return arguments, should_inherit
def load_language(language):
    """
    Load up a language as a dictionary.
    """
    langdict = dict()
    if language.find("..") >= 0 or language.find("?") >= 0 or \
            language.find("!") >= 0 or language.find("\\") >= 0 or \
            language.find("/") >= 0 or language.find("`") >= 0:
        raise ValueError("language identifier has invalid characters")
    parser = RawConfigParser()
    # read_file() replaces the deprecated readfp()
    lang_path = os.path.abspath(os.path.join("lang", language + ".cfg"))
    parser.read_file(codecs.open(lang_path, "r", "utf8"))
    for section in parser.sections():
        for option in parser.options(section):
            langdict[(section + "_" + option).lower()] = \
                parser.get(section, option)
    return langdict
def writeConfig(conf, fileName):
    '''
    Write the configuration file.
    '''
    confParser = RawConfigParser()
    for k1, v1 in conf.items():
        if not confParser.has_section(k1):
            confParser.add_section(k1)
        if len(v1) != 0:
            for k2, v2 in v1.items():
                confParser.set(k1, k2, conf[k1][k2])
    pprint(conf)
    print("Update config to: " + fileName)
    print(confParser.options('envVariables'))
    with open(fileName, 'wt', encoding='utf-8') as configFile:
        confParser.write(configFile)
def main(auction_type, action_type, worker_directory_path=CWD,
         tender_file_path='', run_auction=False, wait_for_result=False,
         data=''):
    with open(data, 'r') as f:
        sample_config = f.read()
    config = RawConfigParser(allow_no_value=True)
    # read_string() accepts the text read above; wrapping it in io.BytesIO
    # and calling read_file() would fail, since read_file() expects text.
    config.read_string(sample_config)
    PARAMS = {}
    for option in config.options(auction_type):
        PARAMS[option] = config.get(auction_type, option)
    auctions_number = int(PARAMS['auctions_number'])
    initial_number = int(PARAMS['initial_number'])
    concurency = int(PARAMS['concurency'])
    start_time = PARAMS['start_time']
    time_offset = int(PARAMS['time_offset'])
    actions = globals()
    tender_id_base_local = TENDER_DATA[auction_type]['tender_id_base'] if \
        not PARAMS['tender_id_base'] else PARAMS['tender_id_base']
    tender_file_path = tender_file_path or TENDER_DATA[auction_type]['path']
    if action_type in [elem.replace('_', '-') for elem in actions]:
        if action_type == 'load-testing':
            load_testing(worker_directory_path, tender_file_path,
                         TENDER_DATA[auction_type]['worker'],
                         TENDER_DATA[auction_type]['config'],
                         auctions_number, initial_number,
                         tender_id_base_local, concurency, run_auction,
                         start_time, time_offset, wait_for_result)
        else:
            actions.get(action_type)(worker_directory_path, tender_file_path,
                                     TENDER_DATA[auction_type]['worker'],
                                     TENDER_DATA[auction_type]['id'],
                                     TENDER_DATA[auction_type]['config'],
                                     start_time, time_offset, wait_for_result)
def get_package_entry_points(package: str, prefix: str) -> Sequence[Tuple[str, str]]:
    libs_path = distutils.sysconfig.get_python_lib(prefix=prefix)
    distrib = distlib.database.DistributionPath(
        [libs_path]).get_distribution(package)
    assert distrib is not None, 'package is not installed'
    entry_points_config = os.path.join(distrib.path, 'entry_points.txt')
    if not os.path.exists(entry_points_config):
        return []
    parser = RawConfigParser()
    parser.read(entry_points_config)
    if not parser.has_section('console_scripts'):
        return []
    return [(key, parser.get('console_scripts', key))
            for key in parser.options('console_scripts')]
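# A small sketch of the entry_points.txt format that get_package_entry_points()
# above parses: it is plain INI, with one "name = module:callable" pair per
# console script. The sample content is illustrative only.
def _demo_entry_points_parsing():
    parser = RawConfigParser()
    parser.read_string(
        "[console_scripts]\n"
        "mytool = mypkg.cli:main\n"
    )
    return [(key, parser.get("console_scripts", key))
            for key in parser.options("console_scripts")]  # [('mytool', 'mypkg.cli:main')]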
def read_raw_parser(self, filename):
    blocks_set = []
    file_parser = RawConfigParser()
    # read_file() replaces the deprecated readfp()
    file_parser.read_file(codecs.open(filename, "r", "utf-8"))
    sections = file_parser.sections()
    for section in sections:
        options = file_parser.options(section)
        block = {}
        for option in options:
            value = file_parser.get(section, option)
            if value[0] in ["[", "("]:
                block[option] = eval(value)
            else:
                block[option] = value
        blocks_set.append(block)
    return blocks_set
def edit_config(filename, settings, dry_run=False): """Edit a configuration file to include `settings` `settings` is a dictionary of dictionaries or ``None`` values, keyed by command/section name. A ``None`` value means to delete the entire section, while a dictionary lists settings to be changed or deleted in that section. A setting of ``None`` means to delete that setting. """ from configparser import RawConfigParser log.debug("Reading configuration from %s", filename) opts = RawConfigParser() opts.read([filename]) for section, options in list(settings.items()): if options is None: log.info("Deleting section [%s] from %s", section, filename) opts.remove_section(section) else: if not opts.has_section(section): log.debug("Adding new section [%s] to %s", section, filename) opts.add_section(section) for option,value in list(options.items()): if value is None: log.debug("Deleting %s.%s from %s", section, option, filename ) opts.remove_option(section,option) if not opts.options(section): log.info("Deleting empty [%s] section from %s", section, filename) opts.remove_section(section) else: log.debug( "Setting %s.%s to %r in %s", section, option, value, filename ) opts.set(section,option,value) log.info("Writing %s", filename) if not dry_run: f = open(filename,'w'); opts.write(f); f.close()
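# A brief usage sketch for edit_config() above; the file name and the settings
# are illustrative only. The settings dict maps section names to option dicts;
# a None option deletes that option, and a None section deletes the section.
def _demo_edit_config():
    edit_config('setup.cfg', {
        'bdist_wheel': {'universal': '1'},    # set [bdist_wheel] universal = 1
        'easy_install': {'index_url': None},  # drop one option
        'aliases': None,                      # drop the whole [aliases] section
    }, dry_run=True)                          # dry_run skips writing the file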
class ConfigFileBackingStore(backing_store.BackingStore):

    def __init__(self, path):
        super(ConfigFileBackingStore, self).__init__(path)
        self.configParser = RawConfigParser()
        self.configParser.read(self.path)

    def identifiers(self):
        return self.configParser.sections()

    def add_identifier(self, ident):
        try:
            self.configParser.add_section(ident)
        except DuplicateSectionError:
            raise ValueError("The identifier `%s` already exists" % str(ident))

    def remove_identifier(self, ident):
        self.configParser.remove_section(ident)

    def keys(self, ident):
        try:
            return self.configParser.options(ident)
        except NoSectionError:
            raise ValueError("No identifier named `%s` exists" % str(ident))

    def get(self, ident, key, default=None):
        try:
            val = self.configParser.get(ident, key.lower())
            return val
        except (NoSectionError, NoOptionError):
            return default

    def set(self, ident, key, value):
        self.configParser.set(ident, key.lower(), value)

    def save(self):
        try:
            with open(self.path, "w") as configFile:
                self.configParser.write(configFile)
        except IOError:
            raise IOError("Cannot save data to `%s`. Permission denied."
                          % self.path)
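# A brief usage sketch for ConfigFileBackingStore above; the path and the
# identifier/key names are illustrative only.
def _demo_config_file_backing_store():
    store = ConfigFileBackingStore("/tmp/credentials.ini")
    if "alice" not in store.identifiers():
        store.add_identifier("alice")
    store.set("alice", "token", "s3cr3t")   # keys are stored lower-cased
    store.save()                            # persists to the backing file
    return store.get("alice", "token", default=None)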
def edit_config(filename, settings, dry_run=False): """Edit a configuration file to include `settings` `settings` is a dictionary of dictionaries or ``None`` values, keyed by command/section name. A ``None`` value means to delete the entire section, while a dictionary lists settings to be changed or deleted in that section. A setting of ``None`` means to delete that setting. """ from configparser import RawConfigParser log.debug("Reading configuration from %s", filename) opts = RawConfigParser() opts.read([filename]) for section, options in list(settings.items()): if options is None: log.info("Deleting section [%s] from %s", section, filename) opts.remove_section(section) else: if not opts.has_section(section): log.debug("Adding new section [%s] to %s", section, filename) opts.add_section(section) for option, value in list(options.items()): if value is None: log.debug("Deleting %s.%s from %s", section, option, filename) opts.remove_option(section, option) if not opts.options(section): log.info("Deleting empty [%s] section from %s", section, filename) opts.remove_section(section) else: log.debug("Setting %s.%s to %r in %s", section, option, value, filename) opts.set(section, option, value) log.info("Writing %s", filename) if not dry_run: f = open(filename, 'w') opts.write(f) f.close()
def get_loaded_configurations() -> OrderedDict: """return a list of configured social authentication backends from a config file given in settings.ALLAUTH_APPLICATIONS_CONFIG """ from django.conf import settings parser = RawConfigParser() if os.path.isfile(settings.ALLAUTH_APPLICATIONS_CONFIG): parser.read([settings.ALLAUTH_APPLICATIONS_CONFIG]) existing_providers = get_available_configurations() providers = OrderedDict() for section in parser.sections(): if section not in existing_providers: continue provider_config = existing_providers[section] values = { key: parser.get(section, key) for key in parser.options(section) if key in provider_config.attributes } provider_config.values = values providers[provider_config.provider_id] = provider_config return providers
class APIVersionWriter(TemplateFileWriter): """ Provide useful method to write Java files. """ def __init__(self, monolithe_config, api_info): """ Initializes a _JavaSDKAPIVersionFileWriter """ super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.vro") self.api_version = api_info["version"] self._api_version_string = SDKUtils.get_string_version(self.api_version) self.api_root = api_info["root"] self.api_prefix = api_info["prefix"] self.monolithe_config = monolithe_config self._output = self.monolithe_config.get_option("output", "transformer") self._name = self.monolithe_config.get_option("name", "transformer") self._class_prefix = "" self._product_accronym = self.monolithe_config.get_option("product_accronym") self._product_name = self.monolithe_config.get_option("product_name") self._url = self.monolithe_config.get_option("url", "transformer") self._package_prefix = self._get_package_prefix(self._url) self._package_name = self._package_prefix + ".vro." + self._name self._package_subdir = self._package_name.replace('.', '/') self.output_directory = "%s/vro" % (self._output) self.override_folder = os.path.normpath("%s/__overrides" % self.output_directory) self.fetchers_path = "/fetchers/" self.enums_path = "/enums/" self.attrs_defaults = RawConfigParser() path = "%s/vro/__attributes_defaults/attrs_defaults.ini" % self._output self.attrs_defaults.optionxform = str self.attrs_defaults.read(path) self.inventory_entities = RawConfigParser() path = "%s/vro/__attributes_defaults/inventory_entities.ini" % self._output self.inventory_entities.optionxform = str self.inventory_entities.read(path) self.workflow_attrs = RawConfigParser() path = "%s/vro/__attributes_defaults/workflow_attrs.ini" % self._output self.workflow_attrs.optionxform = str self.workflow_attrs.read(path) self.attrs_types = RawConfigParser() path = "%s/vro/__attributes_defaults/attrs_types.ini" % self._output self.attrs_types.optionxform = str self.attrs_types.read(path) plugin_info = RawConfigParser() path = "%s/vro/__attributes_defaults/plugin.ini" % self._output plugin_info.optionxform = str plugin_info.read(path) self.plugin_version = plugin_info.get(self.api_version, "pluginVersion") workflow_info = RawConfigParser() path = "%s/vro/__attributes_defaults/workflow.ini" % self._output workflow_info.optionxform = str workflow_info.read(path) self.workflow_version = workflow_info.get(self.api_version, "workflowVersion") with open("%s/vro/__code_header" % self._output, "r") as f: self.header_content = f.read() def perform(self, specifications): """ """ self._resolve_parent_apis(specifications) # Temporary fix, see method's comment for more info self._set_local_and_workflow_type(specifications) # Temporary until get_type_name is enhanced to include specificiation subtype and local_name self._write_file(self.output_directory, "pom.xml.tpl", "pom.xml") self._write_o11plugin(specifications) self._write_o11plugin_core(specifications) self._write_o11plugin_package(specifications) def _write_o11plugin(self, specifications): """ """ output_directory = "%s/o11nplugin-%s" % (self.output_directory, self._name.lower()) self._write_file(output_directory, "o11nplugin/pom.xml.tpl", "pom.xml") license_output_directory = "%s/src/main/vmoapp/VSO-INF" % (output_directory) os.makedirs(license_output_directory) copyfile("%s/LICENSE" % (self.output_directory), "%s/vsoapp.txt" % (license_output_directory)); icons_output_directory = "%s/src/main/dar/resources/images" % (output_directory) os.makedirs(icons_output_directory) 
icons_source_directory = "%s/__icons" % (self.output_directory) self._copyfile("icon-plugin.png", icons_source_directory, icons_output_directory) self._copyfile("icon-session.png", icons_source_directory, icons_output_directory) self._copyfile("icon-folder.png", icons_source_directory, icons_output_directory) for rest_name, specification in specifications.items(): self._copyfile("icon-%s.png" % (specification.entity_name.lower()), icons_source_directory, icons_output_directory) rmtree("%s" % (icons_source_directory)) def _write_o11plugin_core(self, specifications): """ """ output_directory = "%s/o11nplugin-%s-core" % (self.output_directory, self._name.lower()) self._write_file(output_directory, "o11nplugin-core/pom.xml.tpl", "pom.xml") source_output_directory = "%s/src/main/java/%s" % (output_directory, self._package_subdir) self._write_modulebuilder(source_output_directory, package_name=self._package_name) self._write_pluginadaptor(source_output_directory, package_name=self._package_name) self._write_pluginfactory(specifications, source_output_directory, package_name=self._package_name) model_package_name = self._package_name + ".model" model_source_output_directory = "%s/model" % (source_output_directory) self._write_constants(specifications, model_source_output_directory, package_name=model_package_name) self._write_sessionmanager(model_source_output_directory, package_name=model_package_name) self._write_session(specifications, model_source_output_directory, package_name=model_package_name) self._write_modelhelper(specifications, model_source_output_directory, package_name=model_package_name) task_manager = TaskManager() for rest_name, specification in specifications.items(): task_manager.start_task(method=self._write_model, specification=specification, specification_set=specifications, output_directory=model_source_output_directory, package_name=model_package_name) task_manager.start_task(method=self._write_fetcher, specification=specification, specification_set=specifications, output_directory=model_source_output_directory, package_name=model_package_name) for attribute in specification.attributes: if attribute.type == "enum" or attribute.subtype == "enum": task_manager.start_task(method=self._write_enum, specification=specification, attribute=attribute, output_directory=model_source_output_directory, package_name=model_package_name) task_manager.wait_until_exit() def _write_o11plugin_package(self, specifications): """ """ output_directory = "%s/o11nplugin-%s-package" % (self.output_directory, self._name.lower()) self._write_file(output_directory, "o11nplugin-package/pom.xml.tpl", "pom.xml") self._write_file("%s/src/main/resources/META-INF" % (output_directory), "o11nplugin-package/dunes-meta-inf.xml.tpl", "dunes-meta-inf.xml") copyfile("%s/archetype.keystore" % (self.output_directory), "%s/archetype.keystore" % (output_directory)); remove("%s/archetype.keystore" % (self.output_directory)) resources_output_directory = "%s/src/main/resources" % (output_directory) workflows_output_directory = "%s/Workflow" % (resources_output_directory) actions_output_directory = "%s/ScriptModule" % (resources_output_directory) workflow_package = "Session" workflow_directory = "%s/Library/VSPK/Basic/%s" % (workflows_output_directory, workflow_package) self._write_workflow_file(specification=None, specification_set=None, workflow_directory=workflow_directory, template_file="o11nplugin-package/Add Session.element_info.xml.tpl", filename="Add Session.element_info.xml", workflow_type="add", 
workflow_id=None, attrs_includes=None, attrs_excludes=None, workflow_name="Add Session", workflow_package=workflow_package, parent_spec=None) self._write_workflow_file(specification=None, specification_set=None, workflow_directory=workflow_directory, template_file="o11nplugin-package/Add Session.xml.tpl", filename="Add Session.xml", workflow_type="add", workflow_id=None, attrs_includes=None, attrs_excludes=None, workflow_name = "Add Session", workflow_package=workflow_package, parent_spec=None) self._write_workflow_file(specification=None, specification_set=None, workflow_directory=workflow_directory, template_file="o11nplugin-package/Remove Session.element_info.xml.tpl", filename="Remove Session.element_info.xml", workflow_type="remove", workflow_id=None, attrs_includes=None, attrs_excludes=None, workflow_name = "Remove Session", workflow_package=workflow_package, parent_spec=None) self._write_workflow_file(specification=None, specification_set=None, workflow_directory=workflow_directory, template_file="o11nplugin-package/Remove Session.xml.tpl", filename="Remove Session.xml", workflow_type="remove", workflow_id=None, attrs_includes=None, attrs_excludes=None, workflow_name = "Remove Session", workflow_package=workflow_package, parent_spec=None) for rest_name, specification in specifications.items(): for attribute in specification.attributes: attrs_includes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "includes") attrs_excludes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "excludes") if (attribute.required or attribute.local_name in attrs_includes) and (not attribute.local_name in attrs_excludes): if attribute.type == "enum" or attribute.type == "list": self._write_action_files(specification=specification, attribute=attribute, package_name=self._package_name, output_directory=actions_output_directory) for rest_name, specification in specifications.items(): if not specification.is_root: attrs_includes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "includes") attrs_excludes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "excludes") for parent_api in specification.parent_apis: workflow_package = "Other" if specification.package is None else specification.package.capitalize() if parent_api.rest_name in specifications: parent_spec = specifications[parent_api.rest_name] if parent_spec: entity_excludes = self._get_entity_list_filter(self.inventory_entities, parent_spec.entity_name, "excludes") if specification.entity_name not in entity_excludes: if parent_api.allows_create: self._write_workflow_files(specification=specification, specification_set=specifications, output_directory=workflows_output_directory, workflow_type="add", attrs_includes=attrs_includes, attrs_excludes=attrs_excludes, workflow_name="Add %s to %s" % (specification.entity_name, parent_spec.entity_name), workflow_package=workflow_package, parent_spec=parent_spec) if parent_api.allows_create or parent_spec.is_root: self._write_workflow_files(specification=specification, specification_set=specifications, output_directory=workflows_output_directory, workflow_type="find", attrs_includes=attrs_includes, attrs_excludes=attrs_excludes, workflow_name="Find %s in %s" % (specification.entity_name, parent_spec.entity_name), workflow_package=workflow_package, parent_spec=parent_spec) self._write_workflow_files(specification=specification, specification_set=specifications, 
output_directory=workflows_output_directory, workflow_type="edit", attrs_includes=attrs_includes, attrs_excludes=attrs_excludes, workflow_name="Edit %s" % (specification.entity_name), workflow_package=workflow_package) self._write_workflow_files(specification=specification, specification_set=specifications, output_directory=workflows_output_directory, workflow_type="remove", attrs_includes=attrs_includes, attrs_excludes=attrs_excludes, workflow_name="Remove %s" % (specification.entity_name), workflow_package=workflow_package) def _write_session(self, specifications, output_directory, package_name): """ """ template_file = "o11nplugin-core/session.java.tpl" base_name = "Session" filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, package_name=package_name, specifications=list(specifications.values()), root_entity=specifications[self.api_root]) def _write_model(self, specification, specification_set, output_directory, package_name): """ Write autogenerate specification file """ template_file = "o11nplugin-core/model.java.tpl" filename = "%s%s.java" % (self._class_prefix, specification.entity_name) override_content = self._extract_override_content(specification.entity_name) superclass_name = "BaseRootObject" if specification.rest_name == self.api_root else "BaseObject" defaults = {} section = specification.entity_name if self.attrs_defaults.has_section(section): for attribute in self.attrs_defaults.options(section): defaults[attribute] = self.attrs_defaults.get(section, attribute) entity_includes = self._get_entity_list_filter(self.inventory_entities, section, "includes") entity_excludes = self._get_entity_list_filter(self.inventory_entities, section, "excludes") entity_name_attr = "id" if self.inventory_entities.has_section(section): if self.inventory_entities.has_option(section, "name"): entity_name_attr = self.inventory_entities.get(section, "name") self.write(destination=output_directory, filename=filename, template_name=template_file, specification=specification, specification_set=specification_set, version=self.api_version, name=self._name, class_prefix=self._class_prefix, product_accronym=self._product_accronym, override_content=override_content, superclass_name=superclass_name, header=self.header_content, version_string=self._api_version_string, package_name=package_name, attribute_defaults=defaults, entity_name_attr=entity_name_attr, root_api=self.api_root, entity_includes=entity_includes, entity_excludes=entity_excludes) return (filename, specification.entity_name) def _write_fetcher(self, specification, specification_set, output_directory, package_name): """ Write fetcher """ template_file = "o11nplugin-core/fetcher.java.tpl" destination = "%s%s" % (output_directory, self.fetchers_path) base_name = "%sFetcher" % specification.entity_name_plural filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=destination, filename=filename, template_name=template_file, specification=specification, specification_set=specification_set, class_prefix=self._class_prefix, 
product_accronym=self._product_accronym, override_content=override_content, header=self.header_content, name=self._name, version_string=self._api_version_string, package_name=package_name) return (filename, specification.entity_name_plural) def _write_modulebuilder(self, output_directory, package_name): """ """ template_file = "o11nplugin-core/modulebuilder.java.tpl" base_name = "ModuleBuilder" filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, package_name=package_name) def _write_pluginadaptor(self, output_directory, package_name): """ """ template_file = "o11nplugin-core/pluginadaptor.java.tpl" base_name = "PluginAdaptor" filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, package_name=package_name) def _write_pluginfactory(self, specifications, output_directory, package_name): """ """ template_file = "o11nplugin-core/pluginfactory.java.tpl" base_name = "PluginFactory" filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, package_name=package_name, specification_set=specifications, specifications=list(specifications.values())) def _write_constants(self, specifications, output_directory, package_name): """ """ template_file = "o11nplugin-core/constants.java.tpl" base_name = "Constants" filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, product_name=self._product_name, package_name=package_name, specification_set=specifications, specifications=list(specifications.values())) def _write_sessionmanager(self, output_directory, package_name): """ """ template_file = "o11nplugin-core/sessionmanager.java.tpl" base_name = "SessionManager" filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, 
class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, package_name=package_name) def _write_modelhelper(self, specifications, output_directory, package_name): """ """ template_file = "o11nplugin-core/modelhelper.java.tpl" base_name = "ModelHelper" filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, product_name=self._product_name, package_name=package_name, specification_set=specifications, specifications=list(specifications.values())) def _write_action_files(self, specification, attribute, package_name, output_directory): """ """ action_unique_name = "action-" + specification.entity_name.encode('ascii') + '-get-' + attribute.local_name.encode('ascii') action_id = uuid.uuid5(uuid.NAMESPACE_OID, action_unique_name) action_directory = "%s/%s" % (output_directory, self._package_subdir) if not os.path.exists(action_directory): makedirs(action_directory) action_name = "get%s%s" %(specification.entity_name, attribute.local_name[0:1].upper() + attribute.local_name[1:]) self._write_action_file(specification=specification, attribute=attribute, action_directory=action_directory, template_file="o11nplugin-package/get_entity_attribute_action.element_info.xml.tpl", filename="%s.element_info.xml" % (action_name), action_name=action_name, action_id=action_id) self._write_action_file(specification=specification, attribute=attribute, action_directory=action_directory, template_file="o11nplugin-package/get_entity_attribute_action.xml.tpl", filename="%s.xml" % (action_name), action_name=action_name, action_id=action_id) def _write_action_file(self, specification, attribute, action_directory, template_file, filename, action_name, action_id): """ """ self.write(destination=action_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, api_prefix=self.api_prefix, product_name=self._product_name, name=self._name, header=self.header_content, version_string=self._api_version_string, package_prefix=self._package_prefix, package_name=self._package_name, specification=specification, attribute=attribute, action_name = action_name, action_id=action_id, workflow_version=self.workflow_version) def _write_workflow_files(self, specification, specification_set, output_directory, workflow_type, attrs_includes, attrs_excludes, workflow_name, workflow_package, parent_spec = None): """ """ workflow_unique_name = specification.entity_name.encode('ascii') + '-' + workflow_type + ('-' + parent_spec.entity_name.encode('ascii') if parent_spec else "") workflow_id = uuid.uuid5(uuid.NAMESPACE_OID, workflow_unique_name) workflow_directory = "%s/Library/VSPK/Basic/%s" % (output_directory, workflow_package) if not os.path.exists(workflow_directory): makedirs(workflow_directory) self._write_workflow_file(specification=specification, specification_set=specification_set, workflow_directory=workflow_directory, 
template_file="o11nplugin-package/%s_workflow.element_info.xml.tpl" % (workflow_type), filename="%s.element_info.xml" % (workflow_name), workflow_type=workflow_type, workflow_id=workflow_id, attrs_includes=attrs_includes, attrs_excludes=attrs_excludes, workflow_name=workflow_name, workflow_package=workflow_package, parent_spec=parent_spec) self._write_workflow_file(specification=specification, specification_set=specification_set, workflow_directory=workflow_directory, template_file="o11nplugin-package/%s_workflow.xml.tpl" % (workflow_type), filename="%s.xml" % (workflow_name), workflow_type=workflow_type, workflow_id=workflow_id, attrs_includes=attrs_includes, attrs_excludes=attrs_excludes, workflow_name=workflow_name, workflow_package=workflow_package, parent_spec=parent_spec) def _write_workflow_file(self, specification, specification_set, workflow_directory, template_file, filename, workflow_type, workflow_id, attrs_includes, attrs_excludes, workflow_name, workflow_package, parent_spec): """ """ self.write(destination=workflow_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, api_prefix=self.api_prefix, product_name=self._product_name, name=self._name, header=self.header_content, version_string=self._api_version_string, package_prefix=self._package_prefix, package_name=self._package_name, specification=specification, specification_set=specification_set, workflow_type=workflow_type, workflow_id=workflow_id, attrs_includes=attrs_includes, attrs_excludes=attrs_excludes, workflow_name=workflow_name, parent_spec=parent_spec, workflow_version=self.workflow_version, workflow_package=workflow_package) def _write_enum(self, specification, attribute, output_directory, package_name): """ Write autogenerate specification file """ enum_name = specification.entity_name + attribute.local_name[0:1].upper() + attribute.local_name[1:] template_file = "o11nplugin-core/enum.java.tpl" destination = "%s%s" % (output_directory, self.enums_path) filename = "%s%s.java" % (self._class_prefix, enum_name) self.write(destination=destination, filename=filename, template_name=template_file, header=self.header_content, specification=specification, package_name=package_name, enum_name=enum_name, attribute=attribute) return (filename, specification.entity_name) def _write_file(self, output_directory, template_file, filename): """ """ self.write(destination=output_directory, filename=filename, template_name=template_file, version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, api_prefix=self.api_prefix, product_name=self._product_name, name=self._name, header=self.header_content, version_string=self._api_version_string, package_prefix=self._package_prefix, package_name=self._package_name, plugin_version=self.plugin_version) def _extract_override_content(self, name): """ """ # find override file specific_override_path = "%s/%s_%s%s.override.java" % (self.override_folder, self.api_version, self._class_prefix, name.title()) generic_override_path = "%s/%s%s.override.java" % (self.override_folder, self._class_prefix, name.title()) final_path = specific_override_path if os.path.exists(specific_override_path) else generic_override_path # Read override from file override_content = None if os.path.isfile(final_path): override_content = open(final_path).read() return override_content def _get_package_prefix(self, url): "" "" 
hostname_parts = self._get_hostname_parts(url) package_name = "" for index, hostname_part in enumerate(reversed(hostname_parts)): package_name = package_name + hostname_part if index < len(hostname_parts) - 1: package_name = package_name + '.' return package_name def _get_hostname_parts(self, url): "" "" if url.find("http://") != 0: url = "http://" + url hostname = urlparse(url).hostname hostname_parts = hostname.split('.') valid_hostname_parts = [] for hostname_part in hostname_parts: if hostname_part != "www": valid_hostname_parts.append(hostname_part) return valid_hostname_parts # Custom version of this method until the main one gets fixed def _resolve_parent_apis(self, specifications): """ """ for specification_rest_name, specification in specifications.items(): specification.parent_apis[:] = [] for rest_name, remote_spec in specifications.items(): for related_child_api in remote_spec.child_apis: if related_child_api.rest_name == specification.rest_name: parent_api = SpecificationAPI(specification=remote_spec) parent_api.rest_name = remote_spec.rest_name if related_child_api.allows_get: parent_api.allows_get = True if related_child_api.allows_create: parent_api.allows_create = True if related_child_api.allows_update: parent_api.allows_update = True if related_child_api.allows_delete: parent_api.allows_Delete = True specification.parent_apis.append(parent_api) def _set_local_and_workflow_type(self, specifications): "" "" for rest_name, specification in specifications.items(): for attribute in specification.attributes: if attribute.type == "string": attribute.workflow_type = "string" elif attribute.type == "integer": attribute.workflow_type = "number" elif attribute.type == "boolean": attribute.workflow_type = "boolean" elif attribute.type == "time": attribute.workflow_type = "number" elif attribute.type == "float": attribute.workflow_type = "number" elif attribute.type == "enum": enum_type = specification.entity_name + attribute.local_name[0:1].upper() + attribute.local_name[1:] attribute.local_type = enum_type attribute.workflow_type = self._name.upper() + ':' + enum_type elif attribute.type == "object": attr_type = "Object" if self.attrs_types.has_option(specification.entity_name, attribute.local_name): type = self.attrs_types.get(specification.entity_name, attribute.local_name) if type: attr_type = type attribute.local_type = attr_type attribute.workflow_type = self._name.upper() + ':' + attr_type elif attribute.type == "list": if attribute.subtype == "enum": enum_subtype = specification.entity_name + attribute.local_name[0:1].upper() + attribute.local_name[1:] attribute.local_type = "java.util.List<" + enum_subtype + ">" attribute.workflow_type = "Array/" + self._name.upper() + ':' + enum_subtype elif attribute.subtype == "object": attr_subtype = "com.fasterxml.jackson.databind.JsonNode" if self.attrs_types.has_option(specification.entity_name, attribute.local_name): subtype = self.attrs_types.get(specification.entity_name, attribute.local_name) if subtype: attr_subtype = subtype attribute.local_type = "java.util.List<" + attr_subtype + ">" attribute.workflow_type = "Array/" + self._name.upper() + ':' + attr_subtype elif attribute.subtype == "entity": attribute.local_type = "java.util.List<com.fasterxml.jackson.databind.JsonNode>" attribute.workflow_type = "Array/string" else: attribute.local_type = "java.util.List<String>" attribute.workflow_type = "Array/string" def _copyfile(self, filename, input_directory, output_directory): "" "" input_file = "%s/%s" % (input_directory, 
filename) if os.path.isfile(input_file): output_file = "%s/%s" % (output_directory, filename) copyfile(input_file, output_file) def _get_entity_list_filter(self, collection, section, tag): "" "" entities = [] if collection.has_option("all", tag): entity_list_str = collection.get("all", tag) entities = entities + entity_list_str.split(", ") if collection.has_option(section, tag): entity_list_str = collection.get(section, tag) entities = entities + entity_list_str.split(", ") return entities
class Config(object): """A wrapper around RawConfigParser. Provides a ``defaults`` attribute of the same type which can be used to set default values. """ def __init__(self, version=None, _defaults=True): """Use read() to read in an existing config file. version should be an int starting with 0 that gets incremented if you want to register a new upgrade function. If None, upgrade is disabled. """ self._config = ConfigParser(dict_type=_sorted_dict) self.defaults = None if _defaults: self.defaults = Config(_defaults=False) self._version = version self._loaded_version = None self._upgrade_funcs = [] def _do_upgrade(self, func): assert self._loaded_version is not None assert self._version is not None old_version = self._loaded_version new_version = self._version if old_version != new_version: print_d("Config upgrade: %d->%d (%r)" % ( old_version, new_version, func)) func(self, old_version, new_version) def get_version(self): """Get the version of the loaded config file (for testing only) Raises Error if no file was loaded or versioning is disabled. """ if self._version is None: raise Error("Versioning disabled") if self._loaded_version is None: raise Error("No file loaded") return self._loaded_version def register_upgrade_function(self, function): """Register an upgrade function that gets called at each read() if the current config version and the loaded version don't match. Can also be registered after read was called. function(config, old_version: int, new_version: int) -> None """ if self._version is None: raise Error("Versioning disabled") self._upgrade_funcs.append(function) # after read(), so upgrade now if self._loaded_version is not None: self._do_upgrade(function) return function def reset(self, section, option): """Reset the value to the default state""" assert self.defaults is not None try: self._config.remove_option(section, option) except NoSectionError: pass def options(self, section): """Returns a list of options available in the specified section.""" try: options = self._config.options(section) except NoSectionError: if self.defaults: return self.defaults.options(section) raise else: if self.defaults: try: options.extend(self.defaults.options(section)) options = list_unique(options) except NoSectionError: pass return options def get(self, section, option, default=_DEFAULT): """get(section, option[, default]) -> str If default is not given or set, raises Error in case of an error """ try: return self._config.get(section, option) except Error: if default is _DEFAULT: if self.defaults is not None: try: return self.defaults.get(section, option) except Error: pass raise return default def gettext(self, *args, **kwargs): value = self.get(*args, **kwargs) # make sure there are no surrogates value.encode("utf-8") return value def getbytes(self, section, option, default=_DEFAULT): try: value = self._config.get(section, option) value = value.encode("utf-8", "surrogateescape") return value except (Error, ValueError) as e: if default is _DEFAULT: if self.defaults is not None: try: return self.defaults.getbytes(section, option) except Error: pass raise Error(e) return default def getboolean(self, section, option, default=_DEFAULT): """getboolean(section, option[, default]) -> bool If default is not given or set, raises Error in case of an error """ try: return self._config.getboolean(section, option) except (Error, ValueError) as e: if default is _DEFAULT: if self.defaults is not None: try: return self.defaults.getboolean(section, option) except Error: pass raise Error(e) return default def 
getint(self, section, option, default=_DEFAULT): """getint(section, option[, default]) -> int If default is not give or set, raises Error in case of an error """ try: return int(self._config.getfloat(section, option)) except (Error, ValueError) as e: if default is _DEFAULT: if self.defaults is not None: try: return self.defaults.getint(section, option) except Error: pass raise Error(e) return default def getfloat(self, section, option, default=_DEFAULT): """getfloat(section, option[, default]) -> float If default is not give or set, raises Error in case of an error """ try: return self._config.getfloat(section, option) except (Error, ValueError) as e: if default is _DEFAULT: if self.defaults is not None: try: return self.defaults.getfloat(section, option) except Error: pass raise Error(e) return default def getstringlist(self, section, option, default=_DEFAULT): """getstringlist(section, option[, default]) -> list If default is not given or set, raises Error in case of an error. Gets a list of strings, using CSV to parse and delimit. """ try: value = self._config.get(section, option) parser = csv.reader( [value], lineterminator='\n', quoting=csv.QUOTE_MINIMAL) try: vals = next(parser) except (csv.Error, ValueError) as e: raise Error(e) return vals except Error as e: if default is _DEFAULT: if self.defaults is not None: try: return self.defaults.getstringlist(section, option) except Error: pass raise Error(e) return default def setstringlist(self, section, option, values): """Saves a list of unicode strings using the csv module""" sw = StringIO() values = [str(v) for v in values] writer = csv.writer(sw, lineterminator='\n', quoting=csv.QUOTE_MINIMAL) writer.writerow(values) self.set(section, option, sw.getvalue()) def setlist(self, section, option, values, sep=","): """Saves a list of str using ',' as a separator and \\ for escaping""" values = [str(v) for v in values] joined = join_escape(values, sep) self.set(section, option, joined) def getlist(self, section, option, default=_DEFAULT, sep=","): """Returns a str list saved with setlist()""" try: value = self._config.get(section, option) return split_escape(value, sep) except (Error, ValueError) as e: if default is _DEFAULT: if self.defaults is not None: try: return self.defaults.getlist(section, option, sep=sep) except Error: pass raise Error(e) return default def set(self, section, option, value): """Saves the string representation for the passed value Don't pass unicode, encode first. """ if isinstance(value, bytes): raise TypeError("use setbytes") # RawConfigParser only allows string values but doesn't # scream if they are not (and it only fails before the # first config save..) if not isinstance(value, str): value = str(value) try: self._config.set(section, option, value) except NoSectionError: if self.defaults and self.defaults.has_section(section): self._config.add_section(section) self._config.set(section, option, value) else: raise def settext(self, section, option, value): value = str(value) # make sure there are no surrogates value.encode("utf-8") self.set(section, option, value) def setbytes(self, section, option, value): assert isinstance(value, bytes) value = value.decode("utf-8", "surrogateescape") self.set(section, option, value) def write(self, filename): """Write config to filename. 
Can raise EnvironmentError """ assert isinstance(filename, fsnative) mkdir(os.path.dirname(filename)) # temporary set the new version for saving if self._version is not None: self.add_section("__config__") self.set("__config__", "version", self._version) try: with atomic_save(filename, "wb") as fileobj: temp = StringIO() self._config.write(temp) data = temp.getvalue().encode("utf-8", "surrogateescape") fileobj.write(data) finally: if self._loaded_version is not None: self.set("__config__", "version", self._loaded_version) def clear(self): """Remove all sections.""" for section in self._config.sections(): self._config.remove_section(section) def is_empty(self): """Whether the config has any sections""" return not self._config.sections() def read(self, filename): """Reads the config from `filename` if the file exists, otherwise does nothing Can raise EnvironmentError, Error. """ try: with open(filename, "rb") as fileobj: fileobj = StringIO( fileobj.read().decode("utf-8", "surrogateescape")) self._config.readfp(fileobj, filename) except (IOError, OSError): return # don't upgrade if we just created a new config if self._version is not None: self._loaded_version = self.getint("__config__", "version", -1) for func in self._upgrade_funcs: self._do_upgrade(func) def has_option(self, section, option): """If the given section exists, and contains the given option""" return self._config.has_option(section, option) or ( self.defaults and self.defaults.has_option(section, option)) def has_section(self, section): """If the given section exists""" return self._config.has_section(section) or ( self.defaults and self.defaults.has_section(section)) def remove_option(self, section, option): """Remove the specified option from the specified section Can raise Error. """ return self._config.remove_option(section, option) def add_section(self, section): """Add a section named section to the instance if it not already exists.""" if not self._config.has_section(section): self._config.add_section(section)
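# --- Illustrative sketch (not part of the class above) ---
# Config.write()/Config.read() funnel the parser through a StringIO and encode
# with "surrogateescape" so values containing bytes that are not valid UTF-8
# still round-trip to disk. A minimal, self-contained demonstration of that
# technique; the section, option and path value are made up for illustration.
from configparser import ConfigParser
from io import StringIO

parser = ConfigParser()
parser.add_section("paths")
raw = b"/home/user/caf\xe9"  # latin-1 bytes, not valid UTF-8
parser.set("paths", "library", raw.decode("utf-8", "surrogateescape"))

buf = StringIO()
parser.write(buf)
data = buf.getvalue().encode("utf-8", "surrogateescape")  # bytes destined for the file

restored = ConfigParser()
restored.read_string(data.decode("utf-8", "surrogateescape"))
assert restored.get("paths", "library").encode("utf-8", "surrogateescape") == raw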
class Config_File(object): def __init__(self, filename=None): self._cfg = RawConfigParser() self._tags = ["name", "type", "format"] self._filename = filename self._fieldDict = None self._idField = None if self._filename: self._fieldDict = self.read(self._filename) def field_dict(self): return self._fieldDict def read(self, filename): fieldDict = OrderedDict() result = self._cfg.read(filename) if len(result) == 0: raise OSError("Couldn't open '{}'".format(filename)) self._fields = self._cfg.sections() for s in self._fields: # print( "section: '%s'" % s ) fieldDict[s] = {} for o in self._cfg.options(s): # print("option : '%s'" % o ) if not o in self._tags: raise ValueError("No such field type: %s in section: %s" % (o, s)) if (o == "name"): if (self._cfg.get(s, o) == "_id"): if self._idField == None: self._idField = s else: raise ValueError("Duplicate _id field:{} and {}".format(self._idField, s)) fieldDict[s][o] = self._cfg.get(s, o) if not "name" in fieldDict[s]: #assert( s != None) fieldDict[s]["name"] = s # # format is optional for datetime input fields. It is used if present. # if not "format" in fieldDict[s]: fieldDict[s]["format"] = None return fieldDict def fieldDict(self): if self._fieldDict is None: raise ValueError("trying retrieve a fieldDict which has a 'None' value") else: return self._fieldDict def fields(self): return self._fields def hasNewName(self, section): return section != self._fieldDict[section]['name'] def type_value(self, fieldName): return self._fieldDict[fieldName]["type"] #return self._cfg.get(fieldName, "type") def format_value(self, fieldName): return self._fieldDict[fieldName]["format"] #return self._cfg.get(fieldName, "format") def name_value(self, fieldName): return self._fieldDict[fieldName]["name"] #return self._cfg.get(fieldName, "name") def __repr__(self): return "filename:{}\ndict:\n{}\n".format( str( self._filename), str(self._fieldDict))
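# --- Illustrative sketch ---
# Config_File.read() expects an INI file where every section describes a field,
# the only allowed options are "name", "type" and "format", at most one field is
# renamed to "_id", and a missing "name"/"format" falls back to the section name
# / None. The file below is hypothetical and only shows that shape.
import io
from configparser import RawConfigParser

FIELD_INI = """
[Record ID]
name = _id
type = str

[Created]
type = datetime
format = %Y-%m-%d %H:%M:%S

[Amount]
type = float
"""

cfg = RawConfigParser()
cfg.read_file(io.StringIO(FIELD_INI))
for section in cfg.sections():
    name = cfg.get(section, "name") if cfg.has_option(section, "name") else section
    fmt = cfg.get(section, "format") if cfg.has_option(section, "format") else None
    print(section, "->", name, cfg.get(section, "type"), fmt)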
import os
from configparser import RawConfigParser

import urllib3

CONFIG = RawConfigParser()
CONFIG.read('./app.cfg')

ENV = os.environ.get('SPECTRUM_ENVIRONMENT', 'end2end')
COMMON = dict(CONFIG.items('common'))
SETTINGS = dict(CONFIG.items(ENV))

GOOGLEBOT_USER_AGENT = 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'

# https://urllib3.readthedocs.io/en/latest/user-guide.html#ssl would solve the warning,
# but it is `requests` using `urllib3` here so unclear how to apply that guide
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

if __name__ == '__main__':
    for section in CONFIG.sections():
        print(section)
        for option in CONFIG.options(section):
            print(" %s: %s" % (option, CONFIG.get(section, option)))
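# --- Illustrative sketch ---
# The same layering pattern without an external app.cfg: one "common" section
# plus one section per environment, selected by an environment variable.
# Section names and keys here are invented, not taken from the real app.cfg.
import io
import os
from configparser import RawConfigParser

SAMPLE_CFG = """
[common]
timeout = 30

[end2end]
base_url = http://localhost:8080

[production]
base_url = https://example.org
"""

cfg = RawConfigParser()
cfg.read_file(io.StringIO(SAMPLE_CFG))
env = os.environ.get('SPECTRUM_ENVIRONMENT', 'end2end')
if not cfg.has_section(env):
    env = 'end2end'
settings = {**dict(cfg.items('common')), **dict(cfg.items(env))}
print(settings)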
class UsefulConfigParser(object): """A config parser that sucks less than those in module `ConfigParser`.""" def __init__(self, filenames_to_try=[]): # FUN FACT: In Python 3.2, they spontaneously changed the behaviour of # RawConfigParser so that it no longer considers ';' a comment delimiter # for inline comments. # # Compare: # "Configuration files may include comments, prefixed by specific # characters (# and ;). Comments may appear on their own in an otherwise # empty line, or may be entered in lines holding values or section names. # In the latter case, they need to be preceded by a whitespace character # to be recognized as a comment. (For backwards compatibility, only ; # starts an inline comment, while # does not.)" # -- https://docs.python.org/2/library/configparser.html # vs: # "Comment prefixes are strings that indicate the start of a valid comment # within a config file. comment_prefixes are used only on otherwise empty # lines (optionally indented) whereas inline_comment_prefixes can be used # after every valid value (e.g. section names, options and empty lines as # well). By default inline comments are disabled and '#' and ';' are used # as prefixes for whole line comments. # Changed in version 3.2: In previous versions of configparser behaviour # matched comment_prefixes=('#',';') and inline_comment_prefixes=(';',)." # -- https://docs.python.org/3/library/configparser.html#customizing-parser-behaviour # # Grrr... if sys.version_info.major >= 3: self._cp = RawConfigParser(dict_type=OrderedMultiDict, inline_comment_prefixes=(';',)) else: self._cp = RawConfigParser(dict_type=OrderedMultiDict) if isinstance(filenames_to_try, str): filenames_to_try = [filenames_to_try] self._filenames_to_try = filenames_to_try[:] def read(self, filenames_to_try=[]): if isinstance(filenames_to_try, str): filenames_to_try = [filenames_to_try] self._filenames_to_try.extend(filenames_to_try) return self._cp.read(self._filenames_to_try) def sections(self): return self._cp.sections() def options(self, section_name): ## The client code doesn't need to check in advance that the requested ## section name is present in the config; this function will check ## this automatically, so no exception is raised by RawConfigParser. ## Check that `section_name` is present in the config. ## Otherwise, RawConfigParser will raise ConfigParser.NoSectionError. if not self._cp.has_section(section_name): return [] return self._cp.options(section_name) def get(self, section_name, option_name, do_optionxform=True): if do_optionxform: # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.optionxform option_name = self._cp.optionxform(option_name) if section_name is None: return self._get_optval_in_sections(self.sections(), option_name) elif isinstance(section_name, str): return self._get_optval_in_sections([section_name], option_name) else: return self._get_optval_in_sections(section_name, option_name) def _get_optval_in_sections(self, section_names, option_name): ## The client code doesn't need to check in advance that the requested ## section name(s) are present in the config; this function will check ## this automatically, so no exception is raised by RawConfigParser. optvals = [] for section_name in section_names: ## Check that `section_name` is present in the config. ## Otherwise, RawConfigParser will raise ConfigParser.NoSectionError. 
if not self._cp.has_section(section_name): continue optvals.extend([optval for optname, optval in self._cp.items(section_name) if optname == option_name]) return optvals def getboolean(self, section_name, option_name, do_optionxform=True): # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.getboolean return [self._coerce_to_boolean(optval) for optval in self.get(section_name, option_name, do_optionxform)] _boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True, '0': False, 'no': False, 'false': False, 'off': False} def _coerce_to_boolean(self, optval_str): # 'The accepted values for the option are "1", "yes", "true", and "on", # which cause this method to return True, and "0", "no", "false", and # "off", which cause it to return False. These string values are checked # in a case-insensitive manner. Any other value will cause it to raise # ValueError.' # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.getboolean ovs_lower = optval_str.lower() if ovs_lower not in self._boolean_states: raise ValueError("Not a boolean: %s" % optval_str) return self._boolean_states[ovs_lower]
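# --- Illustrative sketch ---
# The version check in UsefulConfigParser.__init__ exists because Python 3
# stopped treating ";" as an inline comment delimiter by default. This
# stand-alone snippet (Python 3) shows the difference the keyword makes.
from configparser import RawConfigParser

SRC = "[server]\nhost = example.com ; internal alias\n"

default = RawConfigParser()
default.read_string(SRC)
print(repr(default.get("server", "host")))   # 'example.com ; internal alias'

py2_like = RawConfigParser(inline_comment_prefixes=(';',))
py2_like.read_string(SRC)
print(repr(py2_like.get("server", "host")))  # 'example.com'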
class APIVersionWriter(TemplateFileWriter): """ Provide useful method to write Java files. """ def __init__(self, monolithe_config, api_info): """ Initializes a _JavaSDKAPIVersionFileWriter """ super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.java") self.api_version = api_info["version"] self._api_version_string = SDKUtils.get_string_version(self.api_version) self.api_root = api_info["root"] self.api_prefix = api_info["prefix"] self.monolithe_config = monolithe_config self._output = self.monolithe_config.get_option("output", "transformer") self._name = self.monolithe_config.get_option("name", "transformer") self._class_prefix = "" self._product_accronym = self.monolithe_config.get_option("product_accronym") self._product_name = self.monolithe_config.get_option("product_name") self._url = self.monolithe_config.get_option("url", "transformer") self._package_prefix = self._get_package_prefix(self._url) self._package_name = self._package_prefix + '.' + self._name + '.' + SDKUtils.get_string_version(self.api_version) self._package_subdir = self._package_name.replace('.', '/') self._base_output_directory = "%s/java" % (self._output) self.output_directory = "%s/src/main/java/%s" % (self._base_output_directory, self._package_subdir) self.override_folder = os.path.normpath("%s/__overrides" % self._base_output_directory) self.fetchers_path = "/fetchers/" self.attrs_defaults = RawConfigParser() path = "%s/java/__attributes_defaults/attrs_defaults.ini" % self._output self.attrs_defaults.optionxform = str self.attrs_defaults.read(path) self.attrs_types = RawConfigParser() path = "%s/java/__attributes_defaults/attrs_types.ini" % self._output self.attrs_types.optionxform = str self.attrs_types.read(path) library_info = RawConfigParser() path = "%s/java/__attributes_defaults/library.ini" % self._output library_info.optionxform = str library_info.read(path) self.library_version = library_info.get(self.api_version, "libraryVersion") with open("%s/java/__code_header" % self._output, "r") as f: self.header_content = f.read() def perform(self, specifications): """ """ self._set_enum_list_local_type(specifications) self._write_info() self._write_session() self._write_build_file() task_manager = TaskManager() for rest_name, specification in specifications.items(): task_manager.start_task(method=self._write_model, specification=specification, specification_set=specifications) task_manager.start_task(method=self._write_fetcher, specification=specification, specification_set=specifications) task_manager.wait_until_exit() def _write_session(self): """ Write SDK session file Args: version (str): the version of the server """ base_name = "%sSession" % self._product_accronym filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=self.output_directory, filename=filename, template_name="session.java.tpl", version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, name=self._name, api_prefix=self.api_prefix, override_content=override_content, header=self.header_content, version_string=self._api_version_string, package_name=self._package_name) def _write_info(self): """ Write API Info file """ self.write(destination=self.output_directory, filename="SdkInfo.java", template_name="sdkinfo.java.tpl", version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, api_prefix=self.api_prefix, 
product_name=self._product_name, name=self._name, header=self.header_content, version_string=self._api_version_string, package_name=self._package_name) def _write_model(self, specification, specification_set): """ Write autogenerate specification file """ filename = "%s%s.java" % (self._class_prefix, specification.entity_name) override_content = self._extract_override_content(specification.entity_name) superclass_name = "RestRootObject" if specification.rest_name == self.api_root else "RestObject" defaults = {} section = specification.entity_name if self.attrs_defaults.has_section(section): for attribute in self.attrs_defaults.options(section): defaults[attribute] = self.attrs_defaults.get(section, attribute) self.write(destination=self.output_directory, filename=filename, template_name="model.java.tpl", specification=specification, specification_set=specification_set, version=self.api_version, name=self._name, class_prefix=self._class_prefix, product_accronym=self._product_accronym, override_content=override_content, superclass_name=superclass_name, header=self.header_content, version_string=self._api_version_string, package_name=self._package_name, attribute_defaults=defaults) return (filename, specification.entity_name) def _write_fetcher(self, specification, specification_set): """ Write fetcher """ destination = "%s%s" % (self.output_directory, self.fetchers_path) base_name = "%sFetcher" % specification.entity_name_plural filename = "%s%s.java" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=destination, filename=filename, template_name="fetcher.java.tpl", specification=specification, specification_set=specification_set, class_prefix=self._class_prefix, product_accronym=self._product_accronym, override_content=override_content, header=self.header_content, name=self._name, version_string=self._api_version_string, package_name=self._package_name) return (filename, specification.entity_name_plural) def _write_build_file(self): """ Write Maven build file (pom.xml) """ self.write(destination=self._base_output_directory, filename="pom.xml", template_name="pom.xml.tpl", version=self.api_version, product_accronym=self._product_accronym, class_prefix=self._class_prefix, root_api=self.api_root, api_prefix=self.api_prefix, product_name=self._product_name, name=self._name, header=self.header_content, version_string=self._api_version_string, package_prefix=self._package_prefix, library_version=self.library_version) def _extract_override_content(self, name): """ """ # find override file specific_override_path = "%s/%s_%s%s.override.java" % (self.override_folder, self.api_version, self._class_prefix, name.title()) generic_override_path = "%s/%s%s.override.java" % (self.override_folder, self._class_prefix, name.title()) final_path = specific_override_path if os.path.exists(specific_override_path) else generic_override_path # Read override from file override_content = None if os.path.isfile(final_path): override_content = open(final_path).read() return override_content def _get_package_prefix(self, url): """ """ hostname_parts = self._get_hostname_parts(url) package_name = "" for index, hostname_part in enumerate(reversed(hostname_parts)): package_name = package_name + hostname_part if index < len(hostname_parts) - 1: package_name = package_name + '.' 
return package_name def _get_hostname_parts(self, url): """ """ if url.find("http://") != 0: url = "http://" + url hostname = urlparse(url).hostname hostname_parts = hostname.split('.') valid_hostname_parts = [] for hostname_part in hostname_parts: if hostname_part != "www": valid_hostname_parts.append(hostname_part) return valid_hostname_parts def _set_enum_list_local_type(self, specifications): """ This method is needed until get_type_name() is enhanced to include specification subtype and local_name """ for rest_name, specification in specifications.items(): for attribute in specification.attributes: if attribute.type == "enum": enum_type = attribute.local_name[0:1].upper() + attribute.local_name[1:] attribute.local_type = enum_type elif attribute.type == "object": attr_type = "Object" if self.attrs_types.has_option(specification.entity_name, attribute.local_name): type = self.attrs_types.get(specification.entity_name, attribute.local_name) if type: attr_type = type attribute.local_type = attr_type elif attribute.type == "list": if attribute.subtype == "enum": enum_subtype = attribute.local_name[0:1].upper() + attribute.local_name[1:] attribute.local_type = "java.util.List<" + enum_subtype + ">" elif attribute.subtype == "object": attr_subtype = "com.fasterxml.jackson.databind.JsonNode" if self.attrs_types.has_option(specification.entity_name, attribute.local_name): subtype = self.attrs_types.get(specification.entity_name, attribute.local_name) if subtype: attr_subtype = subtype attribute.local_type = "java.util.List<" + attr_subtype + ">" elif attribute.subtype == "entity": attribute.local_type = "java.util.List<com.fasterxml.jackson.databind.JsonNode>" else: attribute.local_type = "java.util.List<String>"
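# --- Illustrative sketch ---
# Roughly what _get_package_prefix()/_get_hostname_parts() compute: drop "www"
# from the hostname and join the remaining parts in reverse order to get a
# Java-style package prefix. This simplified version also accepts https URLs.
from urllib.parse import urlparse

def package_prefix(url):
    if not url.startswith(("http://", "https://")):
        url = "http://" + url
    parts = [p for p in urlparse(url).hostname.split('.') if p != "www"]
    return '.'.join(reversed(parts))

print(package_prefix("www.example.com"))  # com.example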
def parse_config_files(self, filenames=None): if filenames is None: filenames = self.find_config_files() logger.debug("Distribution.parse_config_files():") parser = RawConfigParser() for filename in filenames: logger.debug(" reading %s", filename) parser.read(filename, encoding='utf-8') if os.path.split(filename)[-1] == 'setup.cfg': self._read_setup_cfg(parser, filename) for section in parser.sections(): if section == 'global': if parser.has_option('global', 'compilers'): self._load_compilers(parser.get('global', 'compilers')) if parser.has_option('global', 'commands'): self._load_commands(parser.get('global', 'commands')) options = parser.options(section) opt_dict = self.dist.get_option_dict(section) for opt in options: if opt == '__name__': continue val = parser.get(section, opt) opt = opt.replace('-', '_') if opt == 'sub_commands': val = split_multiline(val) if isinstance(val, str): val = [val] # Hooks use a suffix system to prevent being overriden # by a config file processed later (i.e. a hook set in # the user config file cannot be replaced by a hook # set in a project config file, unless they have the # same suffix). if (opt.startswith("pre_hook.") or opt.startswith("post_hook.")): hook_type, alias = opt.split(".") hook_dict = opt_dict.setdefault( hook_type, (filename, {}))[1] hook_dict[alias] = val else: opt_dict[opt] = filename, val # Make the RawConfigParser forget everything (so we retain # the original filenames that options come from) parser.__init__() # If there was a "global" section in the config file, use it # to set Distribution options. if 'global' in self.dist.command_options: for opt, (src, val) in self.dist.command_options['global'].items(): alias = self.dist.negative_opt.get(opt) try: if alias: setattr(self.dist, alias, not strtobool(val)) elif opt == 'dry_run': # FIXME ugh! setattr(self.dist, opt, strtobool(val)) else: setattr(self.dist, opt, val) except ValueError as msg: raise PackagingOptionError(msg)
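# --- Illustrative sketch ---
# A reduced version of the hook handling above: options named
# "pre_hook.<alias>" / "post_hook.<alias>" are grouped per hook type, keyed by
# alias, so a later config file can only replace a hook that reuses the same
# alias. The section name and option values below are invented for the example.
import io
from configparser import RawConfigParser

SRC = """
[build_py]
pre_hook.project = myhooks.check_tree
post_hook.project = myhooks.report
optimize = 1
"""

parser = RawConfigParser()
parser.read_file(io.StringIO(SRC))
opt_dict = {}
for opt in parser.options('build_py'):
    val = parser.get('build_py', opt)
    if opt.startswith(("pre_hook.", "post_hook.")):
        hook_type, alias = opt.split(".")
        opt_dict.setdefault(hook_type, {})[alias] = val
    else:
        opt_dict[opt.replace('-', '_')] = val
print(opt_dict)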
class ParamStore(object): def __init__(self, root_dir, file_name): self._lock = threading.Lock() with self._lock: if not os.path.isdir(root_dir): raise RuntimeError( 'Directory "' + root_dir + '" does not exist.') self._path = os.path.join(root_dir, file_name) self._dirty = False # open config file self._config = RawConfigParser() self._config.read(self._path) def flush(self): if not self._dirty: return with self._lock: self._dirty = False with open(self._path, 'w') as of: self._config.write(of) def get(self, section, option, default=None): """Get a parameter value and return a string. If default is specified and section or option are not defined in the file, they are created and set to default, which is then the return value. """ with self._lock: if not self._config.has_option(section, option): if default is not None: self._set(section, option, default) return default return self._config.get(section, option) def get_datetime(self, section, option, default=None): result = self.get(section, option, default) if result: return WSDateTime.from_csv(result) return result def set(self, section, option, value): """Set option in section to string value.""" with self._lock: self._set(section, option, value) def _set(self, section, option, value): if not self._config.has_section(section): self._config.add_section(section) elif (self._config.has_option(section, option) and self._config.get(section, option) == value): return self._config.set(section, option, value) self._dirty = True def unset(self, section, option): """Remove option from section.""" with self._lock: if not self._config.has_section(section): return if self._config.has_option(section, option): self._config.remove_option(section, option) self._dirty = True if not self._config.options(section): self._config.remove_section(section) self._dirty = True
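# --- Illustrative sketch ---
# The ParamStore pattern in miniature: setters only mark the parser dirty and
# flush() writes the file once, under a lock. Paths and values are examples;
# this is a simplified sketch, not the class above.
import os
import tempfile
import threading
from configparser import RawConfigParser

class TinyStore:
    def __init__(self, path):
        self._lock = threading.Lock()
        self._path = path
        self._dirty = False
        self._config = RawConfigParser()
        self._config.read(path)

    def set(self, section, option, value):
        with self._lock:
            if not self._config.has_section(section):
                self._config.add_section(section)
            elif (self._config.has_option(section, option)
                    and self._config.get(section, option) == value):
                return  # unchanged, nothing to write later
            self._config.set(section, option, value)
            self._dirty = True

    def flush(self):
        with self._lock:
            if not self._dirty:
                return
            with open(self._path, 'w') as of:
                self._config.write(of)
            self._dirty = False

store = TinyStore(os.path.join(tempfile.gettempdir(), 'params.ini'))
store.set('history', 'last run', '2024-01-01')
store.flush()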
class APIVersionWriter(TemplateFileWriter): """ Provide usefull method to write Objj files. """ def __init__(self, monolithe_config, api_info): """ """ super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.objj") self.api_version = api_info["version"] self.api_root = api_info["root"] self.api_prefix = api_info["prefix"] self.monolithe_config = monolithe_config self._output = self.monolithe_config.get_option("output", "transformer") self._transformation_name = self.monolithe_config.get_option("name", "transformer") self._class_prefix = self.monolithe_config.get_option("class_prefix", "transformer") self._product_accronym = self.monolithe_config.get_option("product_accronym") self._product_name = self.monolithe_config.get_option("product_name") self.output_directory = "%s/objj/" % (self._output) self.override_folder = os.path.normpath("%s/../../__overrides" % self.output_directory) self.fetchers_path = "/Fetchers/" self.attrs_defaults = RawConfigParser() path = "%s/objj/__attributes_defaults/attrs_defaults.ini" % self._output self.attrs_defaults.optionxform = str self.attrs_defaults.read(path) with open("%s/objj/__code_header" % self._output, "r") as f: self.header_content = f.read() def perform(self, specifications): """ """ self.model_filenames = dict() self.fetcher_filenames = dict() task_manager = TaskManager() for rest_name, specification in specifications.items(): task_manager.start_task(method=self._write_model, specification=specification, specification_set=specifications) task_manager.start_task(method=self._write_fetcher, specification=specification, specification_set=specifications) task_manager.wait_until_exit() self._write_init_models(filenames=self.model_filenames) self._write_init_fetchers(filenames=self.fetcher_filenames) def _write_model(self, specification, specification_set): """ """ filename = "%s%s.j" % (self._class_prefix, specification.entity_name) override_content = self._extract_override_content(specification.entity_name) constants = self._extract_constants(specification) superclass_name = "NURESTAbstractRoot" if specification.rest_name == self.api_root else "NURESTObject" defaults = {} section = "%s%s" % (self._class_prefix, specification.entity_name) if self.attrs_defaults.has_section(section): for attribute in self.attrs_defaults.options(section): defaults[attribute] = self.attrs_defaults.get(section, attribute) self.write(destination=self.output_directory, filename=filename, template_name="ObjectModel.j.tpl", specification=specification, specification_set=specification_set, version=self.api_version, class_prefix=self._class_prefix, product_accronym=self._product_accronym, override_content=override_content, superclass_name=superclass_name, constants=constants, header=self.header_content, attribute_defaults=defaults) self.model_filenames[filename] = specification.entity_name def _write_init_models(self, filenames): """ """ filename = "Models.j" ordered = OrderedDict(sorted(filenames.items())) self.write(destination=self.output_directory, filename=filename, template_name="Models.j.tpl", filenames=ordered, class_prefix=self._class_prefix, header=self.header_content) def _write_fetcher(self, specification, specification_set): """ """ destination = "%s/%s" % (self.output_directory, self.fetchers_path) base_name = "%sFetcher" % specification.entity_name_plural filename = "%s%s.j" % (self._class_prefix, base_name) override_content = self._extract_override_content(base_name) self.write(destination=destination, filename=filename, 
template_name="ObjectFetcher.j.tpl", specification=specification, class_prefix=self._class_prefix, override_content=override_content, header=self.header_content) self.fetcher_filenames[filename] = specification.entity_name_plural def _write_init_fetchers(self, filenames): """ """ filename = "Fetchers/Fetchers.j" ordered = OrderedDict(sorted(filenames.items())) self.write(destination=self.output_directory, filename=filename, template_name="Fetchers.j.tpl", filenames=ordered, class_prefix=self._class_prefix, header=self.header_content) def _extract_override_content(self, name): """ """ # find override file specific_override_path = "%s/%s_%s%s.override.py" % (self.override_folder, self.api_version, self._class_prefix.lower(), name.lower()) generic_override_path = "%s/%s%s.override.py" % (self.override_folder, self._class_prefix.lower(), name.lower()) final_path = specific_override_path if os.path.exists(specific_override_path) else generic_override_path # Read override from file override_content = None if os.path.isfile(final_path): override_content = open(final_path).read() return override_content def _extract_constants(self, specification): """ Removes attributes and computes constants """ constants = {} for attribute in specification.attributes: if attribute.allowed_choices and len(attribute.allowed_choices) > 0: name = attribute.local_name name = name[:1].upper() + name[1:] for choice in attribute.allowed_choices: constants["%s%s%s_%s" % (self._class_prefix, specification.entity_name, name, choice.upper())] = choice return constants
class APIVersionWriter(TemplateFileWriter): """ Provide usefull method to write Go files. """ def __init__(self, monolithe_config, api_info): """ Initializes a _GoSDKAPIVersionFileWriter """ super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.go") self.monolithe_config = monolithe_config self.api_version = api_info["version"] self.api_root = api_info["root"] self.api_prefix = api_info["prefix"] self._output = self.monolithe_config.get_option("output", "transformer") self._transformation_name = self.monolithe_config.get_option("name", "transformer") self._product_accronym = self.monolithe_config.get_option("product_accronym") self._product_name = self.monolithe_config.get_option("product_name") self.output_directory = "%s/go/%s" % (self._output, self._transformation_name) self.attrs_defaults = RawConfigParser() path = "%s/go/__attributes_defaults/attrs_defaults.ini" % self._output self.attrs_defaults.optionxform = str self.attrs_defaults.read(path) with open("%s/go/__code_header" % self._output, "r") as f: self.header_content = f.read() def perform(self, specifications): """ """ self._write_info() self._write_session() task_manager = TaskManager() for rest_name, specification in specifications.items(): task_manager.start_task(method=self._write_model, specification=specification, specification_set=specifications) task_manager.wait_until_exit() self._format() def _write_info(self): """ """ self.write(destination=self.output_directory, filename="sdkinfo.go", template_name="sdkinfo.go.tpl", version=self.api_version, product_accronym=self._product_accronym, root_api=self.api_root, api_prefix=self.api_prefix, product_name=self._product_name, name=self._transformation_name, header=self.header_content) def _write_session(self): """ """ self.write(destination=self.output_directory, filename="session.go", template_name="session.go.tpl", version=self.api_version, root_api=self.api_root, api_prefix=self.api_prefix, name=self._transformation_name, header=self.header_content) def _write_model(self, specification, specification_set): """ """ filename = "%s.go" % (specification.entity_name.lower()) defaults = {} section = specification.entity_name if self.attrs_defaults.has_section(section): for attribute in self.attrs_defaults.options(section): defaults[attribute] = self.attrs_defaults.get(section, attribute) self.write(destination=self.output_directory, filename=filename, template_name="model.go.tpl", specification=specification, specification_set=specification_set, name=self._transformation_name, header=self.header_content, attribute_defaults=defaults) return (filename, specification.entity_name) def _format(self): """ """ os.system("gofmt -w '%s' >/dev/null 2>&1" % self.output_directory)
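# --- Illustrative sketch ---
# All three writers set optionxform = str before reading the
# __attributes_defaults files; without it RawConfigParser lowercases option
# names, which would break case-sensitive attribute lookups. The section and
# option names below are invented.
from configparser import RawConfigParser

SRC = "[EnterpriseProfile]\nallowedForwardingClasses = A\n"

lowered = RawConfigParser()
lowered.read_string(SRC)
print(lowered.options("EnterpriseProfile"))    # ['allowedforwardingclasses']

preserved = RawConfigParser()
preserved.optionxform = str
preserved.read_string(SRC)
print(preserved.options("EnterpriseProfile"))  # ['allowedForwardingClasses']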
parser.read(OAUTH_CONFIG)
trait_parser = RawConfigParser()
trait_parser.read(TRAIT_FILE)

GENOTYPE_PROVIDERS = {}
providers = parser.sections()
for provider in providers:
    provider_dict = {}
    for option in parser.options(provider):
        provider_dict[option] = parser.get(provider, option)
    provider_dict['has_oauth'] = _can_do_oauth(provider_dict)
    GENOTYPE_PROVIDERS[provider] = provider_dict

TRAITS = []
traits = trait_parser.sections()
for trait in traits:
    trait_dict = {'name': trait}
    for option in trait_parser.options(trait):
        if option == 'quantitive':
            trait_dict[option] = trait_parser.getboolean(trait, option)
        else:
            trait_dict[option] = trait_parser.get(trait, option)
    trait_dict['histogram'] = _get_trait_histogram(trait)
    trait_dict['meanRisk'] = float(np.mean(np.asarray(trait_dict['histogram'][1:])))
    TRAITS.append(trait_dict)
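# --- Illustrative sketch ---
# A self-contained version of the trait-loading loop: each section becomes a
# dict, with the 'quantitive' option coerced to bool via getboolean(). The
# trait names and extra keys are invented; histogram/meanRisk handling is
# omitted because _get_trait_histogram() is not shown here.
import io
from configparser import RawConfigParser

TRAIT_INI = """
[height]
quantitive = yes
unit = cm

[lactose_intolerance]
quantitive = no
"""

trait_parser = RawConfigParser()
trait_parser.read_file(io.StringIO(TRAIT_INI))
traits = []
for trait in trait_parser.sections():
    trait_dict = {'name': trait}
    for option in trait_parser.options(trait):
        if option == 'quantitive':
            trait_dict[option] = trait_parser.getboolean(trait, option)
        else:
            trait_dict[option] = trait_parser.get(trait, option)
    traits.append(trait_dict)
print(traits)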