def __init__(self, okta_profile, verbose, logger, totp_token):
    """Load Okta authentication settings for *okta_profile* from ~/.okta-aws.

    Prompts interactively for username/password when they are not in the
    config file.  Exits the process when the profile has no base-url.
    """
    home_dir = os.path.expanduser('~')
    okta_config = home_dir + '/.okta-aws'
    parser = RawConfigParser()
    parser.read(okta_config)
    profile = okta_profile
    self.totp_token = totp_token
    self.logger = logger
    self.factor = ""
    self.app = None
    if parser.has_option(profile, 'base-url'):
        self.base_url = "https://%s" % parser.get(profile, 'base-url')
        self.logger.info("Authenticating to: %s" % self.base_url)
    else:
        self.logger.error(
            "No base-url set in ~/.okta-aws, make sure you have profile "
            + profile + " setup")
        exit(1)
    if parser.has_option(profile, 'username'):
        self.username = parser.get(profile, 'username')
        self.logger.info("Authenticating as: %s" % self.username)
    else:
        # FIX: this region of the source was mangled by a credentials
        # scrubber ("input('Enter username: '******'password')...").
        # Reconstructed: prompt when the username is not configured.
        self.username = input('Enter username: ')
    if parser.has_option(profile, 'password'):
        self.password = parser.get(profile, 'password')
    else:
        # Prompt without echoing when the password is not configured.
        self.password = getpass('Enter password: ')
    if parser.has_option(profile, 'factor'):
        self.factor = parser.get(profile, 'factor')
        self.logger.debug("Setting MFA factor to %s" % self.factor)
    if parser.has_option(profile, 'app'):
        self.app = parser.get(profile, 'app')
        self.logger.debug("Setting AWS app to %s" % self.app)
    self.verbose = verbose
def _parse_legacy_config_file(self):
    """ Parse a legacy configuration file. """
    parser = RawConfigParser()
    parser.read(LEGACY_CONFIG_FILE)

    # Merge template overrides into a copy of the current styles.
    merged = self.styles.copy()
    for style_key in ('dm_template', 'header_template'):
        if parser.has_option('params', style_key):
            merged[style_key] = parser.get('params', style_key)
    self.styles.update(merged)

    if parser.has_option('params', 'logging_level'):
        self.logging_level = parser.getint('params', 'logging_level')

    # Custom key bindings.
    for binding in self.key_bindings:
        if parser.has_option('keys', binding):
            self._set_key_binding(binding, parser.get('keys', binding))

    # Custom foreground colors, one per palette label.
    for label in (color[0] for color in PALETTE):
        if parser.has_option('colors', label):
            self._set_color(label, parser.get('colors', label))
def __init__(self, profile, okta_profile, lookup, verbose, logger):
    """Initialise AWS credential state, applying overrides from ~/.okta-aws."""
    home_dir = os.path.expanduser('~')
    self.creds_dir = home_dir + "/.aws"
    self.creds_file = self.creds_dir + "/credentials"
    self.lookup = lookup
    self.profile = profile
    self.verbose = verbose
    self.logger = logger
    self.role = ""
    self.aws_partition = AwsPartition.AWS

    config = RawConfigParser()
    config.read(home_dir + '/.okta-aws')

    if config.has_option(okta_profile, 'role'):
        self.role = config.get(okta_profile, 'role')
        self.logger.debug("Setting AWS role to %s" % self.role)
        # The partition (aws / aws-us-gov / ...) is implied by the role ARN.
        self.aws_partition = self.__find_aws_partition_from_role_arn(self.role)
        self.logger.debug("Setting AWS partition to %s" % self.aws_partition)

    # A profile configured under the okta profile wins only when the caller
    # did not pass one explicitly.
    if config.has_option(okta_profile, 'profile') and not profile:
        self.profile = config.get(okta_profile, 'profile')
        self.logger.debug("Setting AWS profile to %s" % self.profile)
def read_config(self, config):
    """Parse *config* (a path or a file-like object) into a flat list of
    ``(src, path, section, key, value)`` tuples, following ``extends``
    entries found in the ``global`` (or ``global:global``) section.

    NOTE(review): uses the Python 2 ``unicode`` builtin and the removed
    ``readfp`` method — this block targets Python 2.
    """
    result = []
    stack = [config]
    while 1:
        config = stack.pop()
        src = None
        if isinstance(config, (str, unicode)):
            # Remember where this entry came from, relative to the cwd.
            src = os.path.relpath(config)
        _config = RawConfigParser()
        # Preserve option-name case; ConfigParser lower-cases by default.
        _config.optionxform = lambda s: s
        if getattr(config, 'read', None) is not None:
            # A file-like object: parse directly, resolve relative paths
            # against self.path.
            _config.readfp(config)
            path = self.path
        else:
            if not os.path.exists(config):
                log.error("Config file '%s' doesn't exist.", config)
                sys.exit(1)
            _config.read(config)
            path = os.path.dirname(config)
        # Entries are appended in reverse here because the whole result is
        # reversed on return.
        for section in reversed(_config.sections()):
            for key, value in reversed(_config.items(section)):
                result.append((src, path, section, key, value))
            # (None, None) acts as a per-section sentinel in the output.
            result.append((src, path, section, None, None))
        if _config.has_option('global', 'extends'):
            extends = _config.get('global', 'extends').split()
        elif _config.has_option('global:global', 'extends'):
            extends = _config.get('global:global', 'extends').split()
        else:
            break
        # Queue extended configs at the front of the stack, resolved
        # relative to the current file's directory.
        stack[0:0] = [
            os.path.abspath(os.path.join(path, x))
            for x in reversed(extends)]
    return reversed(result)
def parse_cmake_module(s_in):
    """Extract the example autocmake.cfg entry embedded in a CMake module.

    The module documents itself in reST comment blocks opened by a line
    containing ``#.rst:``.  The text after the marker
    ``Example autocmake.cfg entry::`` is parsed as an ini file and the
    values of its ``docopt``, ``define`` and ``export`` options are
    returned (``None`` for any option that is absent).
    """
    rst_lines = []
    is_rst_line = False
    for line in s_in.split('\n'):
        if is_rst_line:
            # Stay in rst mode only while the comment block continues.
            if len(line) > 0:
                if line[0] != '#':
                    is_rst_line = False
            else:
                is_rst_line = False
        if is_rst_line:
            rst_lines.append(line[2:])  # strip the leading "# "
        if '#.rst:' in line:
            is_rst_line = True

    autocmake_entry = '\n'.join(rst_lines).split('Example autocmake.cfg entry::')[1]
    autocmake_entry = autocmake_entry.replace('\n ', '\n')

    config = RawConfigParser(dict_type=OrderedDict)
    # FIX: readfp() is deprecated since Python 3.2 and removed in 3.12;
    # read_file() is the drop-in replacement.
    config.read_file(StringIO(autocmake_entry))

    config_docopt = None
    config_define = None
    config_export = None
    for section in config.sections():
        if config.has_option(section, 'docopt'):
            config_docopt = config.get(section, 'docopt')
        if config.has_option(section, 'define'):
            config_define = config.get(section, 'define')
        if config.has_option(section, 'export'):
            config_export = config.get(section, 'export')
    return config_docopt, config_define, config_export
def run(program, type, shots = 100):
    """Execute *program* on real IBM Q hardware (type == 'real') or on the
    local qasm simulator, returning the measurement counts.

    NOTE(review): ``type`` shadows the builtin but is kept for interface
    stability; ``shots`` is not forwarded to the real-hardware execute
    call — confirm that is intended.
    """
    if type == 'real':
        # run.isInit is a function attribute used as a one-shot guard so the
        # IBM Q account is only enabled once per process.
        if not run.isInit:
            # Setup the API key for the real quantum computer.
            parser = RawConfigParser()
            parser.read('config.ini')

            # Read configuration values.
            proxies = ast.literal_eval(parser.get('IBM', 'proxies')) if parser.has_option('IBM', 'proxies') else None
            verify = (True if parser.get('IBM', 'verify') == 'True' else False) if parser.has_option('IBM', 'verify') else True
            token = parser.get('IBM', 'key')
            IBMQ.enable_account(token = token, proxies = proxies, verify = verify)
            run.isInit = True

        # Set the backend server: pick the least busy real device.
        backend = qiskit.providers.ibmq.least_busy(qiskit.IBMQ.backends(simulator=False))

        # Execute the program on the quantum machine.
        print("Running on", backend.name())
        start = time.time()
        job = qiskit.execute(program, backend)
        result = job.result().get_counts()
        stop = time.time()
        print("Request completed in " + str(round((stop - start) / 60, 2)) + "m " + str(round((stop - start) % 60, 2)) + "s")
        return result
    else:
        # Execute the program in the simulator.
        print("Running on the simulator.")
        start = time.time()
        job = qiskit.execute(program, qiskit.Aer.get_backend('qasm_simulator'), shots=shots)
        result = job.result().get_counts()
        stop = time.time()
        print("Request completed in " + str(round((stop - start) / 60, 2)) + "m " + str(round((stop - start) % 60, 2)) + "s")
        return result
def get_comic_url(comic_info: RawConfigParser):
    """Work out the comic site's base URL and subdirectory.

    Domain resolution order: a CNAME file, the GITHUB_REPOSITORY
    environment variable, then the "Comic domain" setting; a missing
    domain raises ValueError.  Returns (comic_url, base_directory).
    """
    comic_domain = None
    base_directory = ""
    if os.path.isfile("CNAME"):
        with open("CNAME") as cname_file:
            comic_domain = cname_file.read().strip('/')
    elif "GITHUB_REPOSITORY" in os.environ:
        repo_author, base_directory = os.environ["GITHUB_REPOSITORY"].split("/")
        comic_domain = f"{repo_author}.github.io"
    elif comic_info.has_option("Comic Settings", "Comic domain"):
        comic_domain = comic_info.get("Comic Settings", "Comic domain").strip("/")
    else:
        raise ValueError(
            'Set "Comic domain" in the [Comic Settings] section of your comic_info.ini file '
            'before building your site locally. Please see the comic_git wiki for more information.'
        )
    if comic_info.has_option("Comic Settings", "Comic subdirectory"):
        base_directory = comic_info.get("Comic Settings", "Comic subdirectory").strip("/")
    if not comic_domain.startswith("http"):
        # Pick a scheme when none was supplied with the domain.
        use_https = (
            comic_info.has_option("Comic Settings", "Use https when building comic URL")
            and comic_info.getboolean("Comic Settings", "Use https when building comic URL")
        )
        comic_domain = ("https://" if use_https else "http://") + comic_domain
    if base_directory:
        base_directory = "/" + base_directory
    comic_url = comic_domain + base_directory
    print(f"Base URL: {comic_url}, base subdirectory: {base_directory}")
    return comic_url, base_directory
def parse_token_file(self, token_file):
    """Populate OAuth token attributes from *token_file* when present."""
    parser = RawConfigParser()
    parser.read(token_file)
    for attr in ("oauth_token", "oauth_token_secret"):
        if parser.has_option(SECTION_TOKEN, attr):
            setattr(self, attr, parser.get(SECTION_TOKEN, attr))
def _parse_legacy_token_file(self):
    """Load OAuth credentials from the legacy token file.

    FIX: the secret was guarded by has_option(..., "oauth_token") instead
    of "oauth_token_secret", so a file containing only the token raised
    NoOptionError when the secret was fetched, and a file containing only
    the secret silently skipped it.
    """
    conf = RawConfigParser()
    conf.read(LEGACY_TOKEN_FILE)
    if conf.has_option(SECTION_TOKEN, "oauth_token"):
        self.oauth_token = conf.get(SECTION_TOKEN, "oauth_token")
    if conf.has_option(SECTION_TOKEN, "oauth_token_secret"):
        self.oauth_token_secret = conf.get(SECTION_TOKEN, "oauth_token_secret")
def parse_token_file(self, token_file):
    """Read cached OAuth credentials out of *token_file*."""
    parser = RawConfigParser()
    parser.read(token_file)
    for attr in ('oauth_token', 'oauth_token_secret'):
        if parser.has_option(SECTION_TOKEN, attr):
            setattr(self, attr, parser.get(SECTION_TOKEN, attr))
def _parse_legacy_token_file(self):
    """Load OAuth credentials from the legacy token file.

    FIX: the secret was guarded by has_option(..., 'oauth_token') instead
    of 'oauth_token_secret' — a token-only file crashed with NoOptionError
    and a secret-only file was silently ignored.
    """
    conf = RawConfigParser()
    conf.read(LEGACY_TOKEN_FILE)
    if conf.has_option(SECTION_TOKEN, 'oauth_token'):
        self.oauth_token = conf.get(SECTION_TOKEN, 'oauth_token')
    if conf.has_option(SECTION_TOKEN, 'oauth_token_secret'):
        self.oauth_token_secret = conf.get(SECTION_TOKEN, 'oauth_token_secret')
class OktaAuthConfig():
    """ Config helper class

    Wraps the ~/.okta-aws ini file.  Parts of the original source were
    mangled by a credentials scrubber; username_for/password_for were
    reconstructed to prompt interactively when a value is not configured.
    """

    def __init__(self, logger):
        self.logger = logger
        self.config_path = os.path.expanduser('~') + '/.okta-aws'
        self._value = RawConfigParser()
        self._value.read(self.config_path)

    def base_url_for(self, okta_profile):
        """ Gets base URL from config """
        if self._value.has_option(okta_profile, 'base-url'):
            base_url = self._value.get(okta_profile, 'base-url')
            self.logger.info("Authenticating to: %s" % base_url)
        else:
            base_url = self._value.get('default', 'base-url')
            self.logger.info("Using base-url from default profile %s" % base_url)
        return base_url

    def username_for(self, okta_profile):
        """ Gets username from config """
        if self._value.has_option(okta_profile, 'username'):
            username = self._value.get(okta_profile, 'username')
            self.logger.info("Authenticating as: %s" % username)
        else:
            username = input('Enter username: ')
        return username

    def password_for(self, okta_profile):
        """ Gets password from config """
        if self._value.has_option(okta_profile, 'password'):
            password = self._value.get(okta_profile, 'password')
        else:
            password = getpass('Enter password: ')
        return password

    def factor_for(self, okta_profile):
        """ Gets factor from config """
        if self._value.has_option(okta_profile, 'factor'):
            factor = self._value.get(okta_profile, 'factor')
            self.logger.debug("Setting MFA factor to %s" % factor)
            return factor
        return None

    def save_chosen_role_for_profile(self, okta_profile, role_arn):
        """ Saves the chosen role to config """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)
        base_url = self.base_url_for(okta_profile)
        self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'role', role_arn)
        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)
def settings_from_config(options):
    """Try to read config file and parse settings.

    Args:
        options: parsed NameSpace, with `config` and maybe `acl` values

    Returns:
        tuple of S3Config and PyPIConfig objects, or Nones when missing values
    """
    parser = RawConfigParser()
    config_file = options.config[0] if isinstance(options.config, list) else options.config
    try:
        parser.read(config_file)
    except Exception as error:
        print(error, file=sys.stderr)

    key = "pypicloud"  # config section key
    if key not in parser.sections():
        return None, None

    s3_conf = None
    pypi_conf = None

    if all(parser.has_option(key, opt) for opt in ("bucket", "access", "secret")):
        # The command-line ACL wins over the config-file one.
        if getattr(options, "acl", None):
            acl = options.acl[0]
        elif parser.has_option(key, "acl"):
            acl = parser.get(key, "acl")
        else:
            acl = None
        s3_conf = S3Config(
            parser.get(key, "bucket"),
            parser.get(key, "access"),
            parser.get(key, "secret"),
            acl,
        )

    if all(parser.has_option(key, opt) for opt in ("repository", "username", "password")):
        pypi_conf = PyPIConfig(
            parser.get(key, "repository"),
            parser.get(key, "username"),
            parser.get(key, "password"),
        )
    return s3_conf, pypi_conf
def loadConfig():
    """Read CONFIG_FILE into the global CONFIG, filling in default options."""
    global CONFIG
    CONFIG = RawConfigParser()
    CONFIG.read(CONFIG_FILE)
    # Ensure every expected option exists so later code can read blindly.
    for option, value in (('file', '.scrobbler.log'),
                          ('user', ''),
                          ('password', ''),
                          ('timezone', '0')):
        if not CONFIG.has_option('DEFAULT', option):
            CONFIG.set('DEFAULT', option, value)
class FactorioLocale:
    """Accumulates Factorio locale entries from cfg-style files.

    ``conf`` holds trusted translations; ``crap`` holds entries whose value
    contains ``__`` (placeholder markup) so they only win as a last resort.
    """

    def __init__(self):
        self.conf = RawConfigParser()   # trusted translations
        self.crap = RawConfigParser()   # entries containing '__' placeholders

    def get_name(self, section, name):
        """Return the stored translation, or a '#section#name#' marker."""
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        """Parse one locale file (encoding sniffed via chardet) and merge it."""
        parsed = RawConfigParser()
        with open(csv, 'rb') as handle:
            raw = handle.read()
        text = raw.decode(chardet.detect(raw)['encoding'])
        # Keys may appear before the first section header, so prepend a
        # synthetic section to keep configparser happy.
        parsed.read_string('[__global__]\n' + text)
        for sec in parsed.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)
            for key, val in parsed.items(sec):
                if '__' in val:
                    # Placeholder-bearing value: quarantine it.
                    if self.crap.has_option(sec, key):
                        print('Overwriting crap locale %s (%r -> %r)' % (key, self.crap.get(sec, key), val))
                    self.crap.set(sec, key, val)
                else:
                    if self.conf.has_option(sec, key) and self.conf.get(sec, key).lower() != val.lower():
                        print('Overwriting locale %s (%r -> %r)' % (key, self.conf.get(sec, key), val))
                    self.conf.set(sec, key, val)

    def merge(self):
        """Copy quarantined entries into conf where no trusted entry exists."""
        for sec in self.crap.sections():
            for key, val in self.crap.items(sec):
                if not self.conf.has_option(sec, key):
                    print('Using crap locale %s (%r)' % (key, val))
                    self.conf.set(sec, key, val)

    def save(self, out):
        """Write the merged locale to *out* as an ini file."""
        with open(out, 'w') as handle:
            self.conf.write(handle)
class FactorioLocale:
    """Accumulator for Factorio locale (translation) files.

    ``conf`` holds trusted entries; ``crap`` holds entries whose value
    contains ``__`` (placeholder markup), kept apart so they are only used
    as a last resort (see merge()).
    """

    def __init__(self):
        self.conf = RawConfigParser()
        self.crap = RawConfigParser()

    def get_name(self, section, name):
        # Empty stored values fall back to a visible '#section#name#' marker.
        # NOTE(review): a completely missing option raises NoOptionError
        # rather than returning the marker — confirm callers pre-check.
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        # Parse one locale file; the encoding is sniffed with chardet and a
        # synthetic [__global__] section is prepended because locale files
        # may contain keys before any section header.
        conf = RawConfigParser()
        with open(csv, 'rb') as f:
            input_bytes = f.read()
        decoded = input_bytes.decode(chardet.detect(input_bytes)['encoding'])
        decoded = '[__global__]\n' + decoded
        conf.read_string(decoded)
        for sec in conf.sections():
            if not self.conf.has_section(sec):
                # Keep conf and crap section lists in lockstep.
                self.conf.add_section(sec)
                self.crap.add_section(sec)
            for k, v in conf.items(sec):
                # Values containing '__' are placeholder markup, not real
                # translations; quarantine them in self.crap.
                is_crap = False
                if '__' in v:
                    is_crap = True
                if not is_crap:
                    if self.conf.has_option(sec, k):
                        if self.conf.get(sec, k).lower() != v.lower():
                            print('Overwriting locale %s (%r -> %r)' % (k, self.conf.get(sec, k), v))
                    self.conf.set(sec, k, v)
                else:
                    if self.crap.has_option(sec, k):
                        print('Overwriting crap locale %s (%r -> %r)' % (k, self.crap.get(sec, k), v))
                    self.crap.set(sec, k, v)

    def merge(self):
        # Fill gaps in conf from the quarantined entries.
        for sec in self.crap.sections():
            for k, v in self.crap.items(sec):
                if not self.conf.has_option(sec, k):
                    print('Using crap locale %s (%r)' % (k, v))
                    self.conf.set(sec, k, v)

    def save(self, out):
        # Serialise the merged locale as an ini file.
        with open(out, 'w') as f:
            self.conf.write(f)
def _load_config(self):
    """Load configuration options."""
    parser = RawConfigParser()
    parser.read([config.config_path])
    if parser.has_option("ui", "browse_dir"):
        self._browse_dir = parser.get("ui", "browse_dir")
    if parser.has_option("ui", "enable_downscaling"):
        # Push the boolean straight into the viewer's Tk variable.
        self.viewer.enable_downscaling.set(
            parser.getboolean("ui", "enable_downscaling"))
    if parser.has_option("ui", "rename_and_move_dir"):
        self._rename_and_move_dir = parser.get("ui", "rename_and_move_dir")
def check_sts_token(self, profile):
    """ Verifies that STS credentials are valid """
    # Don't check for creds if profile is blank
    if not profile:
        return False
    parser = RawConfigParser()

    if not os.path.exists(self.creds_dir):
        self.logger.info(
            "AWS credentials path does not exist. Not checking.")
        return False
    elif not os.path.isfile(self.creds_file):
        self.logger.info(
            "AWS credentials file does not exist. Not checking.")
        return False

    parser.read(self.creds_file)

    if not parser.has_section(profile):
        self.logger.info(
            "No existing credentials found. Requesting new credentials.")
        return False
    # FIX: these option checks previously read self.profile while the
    # section check above used the *profile* argument; when the two
    # differed this raised NoSectionError or validated the wrong profile.
    elif not parser.has_option(profile, 'aws_access_key_id'):
        self.logger.info(
            "No AWS_ACCESS_KEY_ID. Requesting new credentials.")
        return False
    elif not parser.has_option(profile, 'aws_secret_access_key'):
        self.logger.info(
            "No AWS_SECRET_ACCESS_KEY. Requesting new credentials.")
        return False

    # Ask STS whether the cached credentials still work.
    session = boto3.Session(profile_name=profile)
    sts = session.client('sts')
    try:
        sts.get_caller_identity()
    except ClientError as ex:
        if ex.response['Error']['Code'] == 'ExpiredToken':
            self.logger.info(
                "Temporary credentials have expired. Requesting new credentials."
            )
            return False
    self.logger.info("STS credentials are valid. Nothing to do.")
    return True
def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
    """Read the .pypirc file.

    Returns the server entry matching *repository* as a dict with keys
    server/username/repository/realm/password, or an empty dict when no
    config exists or nothing matches.  Supports both the modern
    [distutils]/index-servers layout and the old [server-login] format.
    """
    rc = get_pypirc_path()
    if os.path.exists(rc):
        config = RawConfigParser()
        config.read(rc)
        sections = config.sections()
        if "distutils" in sections:
            # let's get the list of servers
            index_servers = config.get("distutils", "index-servers")
            _servers = [server.strip() for server in
                        index_servers.split("\n")
                        if server.strip() != ""]
            if _servers == []:
                # nothing set, let's try to get the default pypi
                if "pypi" in sections:
                    _servers = ["pypi"]
                else:
                    # the file is not properly defined, returning
                    # an empty dict
                    return {}
            for server in _servers:
                current = {"server": server}
                current["username"] = config.get(server, "username")

                # optional params
                for key, default in (("repository", DEFAULT_REPOSITORY),
                                     ("realm", DEFAULT_REALM),
                                     ("password", None)):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default
                # A server matches by its section name or its repository URL.
                if (current["server"] == repository or
                        current["repository"] == repository):
                    return current
        elif "server-login" in sections:
            # old format
            server = "server-login"
            if config.has_option(server, "repository"):
                repository = config.get(server, "repository")
            else:
                repository = DEFAULT_REPOSITORY
            return {
                "username": config.get(server, "username"),
                "password": config.get(server, "password"),
                "repository": repository,
                "server": server,
                "realm": DEFAULT_REALM,
            }
    # No .pypirc, or no matching server entry.
    return {}
def parse_config(filename, dirs=None):
    """Locate and parse *filename*, returning (meta, vars, sections, requires)."""
    search = [os.path.join(d, filename) for d in dirs] if dirs else [filename]

    config = RawConfigParser()
    found = config.read(search)
    if not len(found) >= 1:
        raise PkgNotFound("Could not find file(s) %s" % str(search))

    # Parse meta and variables sections
    meta = parse_meta(config)

    vars = {}
    if config.has_section('variables'):
        for name, value in config.items("variables"):
            vars[name] = _escape_backslash(value)

    # Every remaining section describes a sub-package.
    sections = {}
    requires = {}
    for s in config.sections():
        if s in ['meta', 'variables']:
            continue
        if config.has_option(s, "requires"):
            requires[s] = config.get(s, 'requires')
        sections[s] = dict(config.items(s))
    return meta, vars, sections, requires
def main():
    """Validate every uWSGI vassal config and prepare its run directory."""
    emperor_cfg = RawConfigParser()
    emperor_cfg.read('/etc/uwsgi/emperor.ini')

    vassal_defaults = RawConfigParser()
    vassal_defaults.read(emperor_cfg.get('uwsgi', 'vassals-include'))

    emperor_dir = emperor_cfg.get('uwsgi', 'emperor')
    errors = []
    for ini_path in Path(emperor_dir).iterdir():
        if not (ini_path.is_file() and ini_path.suffix == '.ini'):
            continue
        vassal_cfg = RawConfigParser()
        vassal_cfg.read(ini_path)
        vassal = ini_path.stem
        if not vassal_cfg.has_option('uwsgi', 'vassal-name'):
            errors.append(
                'A "vassal-name" key is required in the uWSGI configuration for app "{}".'
                .format(vassal))
            continue
        uid = vassal_cfg.get('uwsgi', 'uid')
        gid = vassal_cfg.get('uwsgi', 'gid')
        # The run directory is derived from the pidfile template in the
        # shared vassal defaults.
        pidfile = vassal_defaults.get('uwsgi', 'pidfile')
        run_dir = Path(pidfile.replace('%(vassal-name)', vassal)).parent
        run_dir.mkdir(parents=True, exist_ok=True)
        chown(str(run_dir), uid, gid)
    if errors:
        raise Exception('\n'.join(errors))
def read_config(self, configpath):
    """Apply site settings from *configpath*; follows 'alias' indirection."""
    # Current object state supplies the defaults, so an absent option
    # leaves behaviour unchanged.
    parser = RawConfigParser({
        'query': self.query_path,
        'quote_plus': 'on' if self.quote_fn is quote_plus else 'off',
        'site': self.site,
        'tlds': ' '.join(self.tld_list),
        'hash_prefix_numbers': 'on' if self.hash_num else 'off'})
    parser.read(configpath)
    section = self.site
    if not parser.has_section(section):
        return
    # we allow a single level of indirection to support limited
    # aliases. Inheritance is not currently supported
    if parser.has_option(section, 'alias'):
        self.site = parser.get(section, 'alias')
        return self.read_config(configpath)
    self.query_path = parser.get(section, 'query')
    self.quote_fn = quote_plus if parser.getboolean(section, 'quote_plus') else quote
    self.tld_list = parser.get(section, 'tlds').replace(',', ' ').split()
    self.hash_num = parser.getboolean(section, 'hash_prefix_numbers')
    self.site = parser.get(section, 'site')
def checkLoggingConfig(configfile):
    """Create or patch the logging config so all default keys exist.

    Also strips sysLogHandler (unavailable on Windows) from the handler
    list, and rewrites the file only when something changed or it was new.
    """
    config = RawConfigParser()
    needs_write = True
    if os.path.exists(configfile):
        config.read(configfile)
        needs_write = False
    # Fill in any missing sections/options from the defaults table.
    for section in defaults:
        if not config.has_section(section):
            config.add_section(section)
            needs_write = True
        for option in defaults[section]:
            if not config.has_option(section, option):
                config.set(section, option, str(defaults[section][option]))
    # Remove sysLogHandler if you're on Windows
    if 'sysLogHandler' in config.get('handlers', 'keys'):
        config.set('handlers', 'keys',
                   config.get('handlers', 'keys').replace('sysLogHandler', ''))
        needs_write = True
    # Trim any trailing separators left behind by the removal.
    while config.get('handlers', 'keys').endswith(",") or config.get('handlers', 'keys').endswith(" "):
        config.set('handlers', 'keys', config.get('handlers', 'keys')[:-1])
        needs_write = True
    if needs_write:
        with open(configfile, "w") as fp:
            config.write(fp)
def readConfig(self):
    """Load options from the local or default config file, if either exists."""
    if os.path.exists(configLocalFileName):
        path = configLocalFileName
    elif os.path.exists(configDefaultFileName):
        path = configDefaultFileName
    else:
        return

    config = RawConfigParser()
    config.read(path)
    for option in configOptions:
        if not config.has_option(configSectionName, option):
            continue
        current = getattr(self, option)
        raw = config.get(configSectionName, option)
        if isinstance(current, bool):
            # check to see if the current/default value is a boolean; if so,
            # makes user user supplied value is a bool, will convert string to bool.
            # ie. makes 0 = False and 1 = True
            value = raw.strip().lower() == "true"
            if raw == "1":
                value = True
            if raw == "0":
                value = False
        else:
            value = raw
        setattr(self, option, value)
def run():
    """Load the master config, then build every configured podcast feed."""
    # load the config file
    global master_config
    master_config = RawConfigParser()
    master_config.read(config_directory + MASTER_CONFIG_FILENAME)

    # set the get-iplayer path
    global get_iplayer_path
    if master_config.has_option("General", "get-iplayer_path"):
        get_iplayer_path = master_config.get("General", "get-iplayer_path")

    # refresh the get-iplayer cache
    # print("Refreshing get-iplayer... (this may take some time)")
    # subprocess.check_output([get_iplayer_path, "--type=all", "--quiet"])
    # new BBC rules :-( now we get the programm info externally using a different script
    # and read that scripts output directly into a hash
    # global bbc_programmes
    # bbc_programmes = load_bbc_programmes()

    # scan for feed config files and process each
    for root, directories, files in os.walk(config_directory + FEED_CONFIG_DIRECTORY):
        for filename in files:
            if filename == ".DS_Store":  # skip macOS metadata files
                continue
            print("about to read config " + filename )
            load_feed(filename)
        print("Finished.")
        return  # stop here, we have processed the feeds

    # if we have not returned at this point, then no config directory was found
    print("No config directory found")
def get_comic_url(comic_info: RawConfigParser):
    """Build the comic's base URL from CI environment and/or config settings.

    Returns (comic_url, base_directory); raises ValueError when either the
    domain or the subdirectory cannot be determined.
    """
    comic_domain, base_directory = None, None
    if "GITHUB_REPOSITORY" in os.environ:
        # On GitHub Actions, derive defaults from the repository slug.
        repo_author, base_directory = os.environ["GITHUB_REPOSITORY"].split("/")
        comic_domain = f"http://{repo_author}.github.io"
    # Explicit config settings override the derived defaults.
    if comic_info.has_option("Comic Info", "Comic domain"):
        comic_domain = comic_info.get("Comic Info", "Comic domain").rstrip("/")
    if comic_info.has_option("Comic Info", "Comic subdirectory"):
        base_directory = comic_info.get("Comic Info", "Comic subdirectory").strip("/")
    if not (comic_domain and base_directory):
        raise ValueError(
            'Set "Comic domain" and "Comic subdirectory" in the [Comic Info] section of your comic_info.ini file '
            'before building your site locally. Please see the comic_git wiki for more information.'
        )
    return comic_domain + '/' + base_directory, base_directory
def create(self, contact, duedelta: Optional[int] = None, **kwargs):
    """Create a new invoice file for *contact* and return its filename."""
    if duedelta is None:
        duedelta = self.default_due
    with self.lock:
        today = datetime.date.today()
        due = today + datetime.timedelta(days=duedelta)
        filename = self.find_filename()

        invoice = RawConfigParser()
        invoice.add_section("invoice")
        invoice.set("invoice", "contact", contact)
        invoice.set("invoice", "date", today.isoformat())
        invoice.set("invoice", "due", due.isoformat())

        # Apply defaults from contact (keys prefixed "default_").
        for key, value in self.read_contact(contact).items():
            if key.startswith("default_"):
                invoice.set("invoice", key[8:], value)

        # Apply passed values.
        for key, value in kwargs.items():
            invoice.set("invoice", key, value)

        # Ensure rate and item are present.
        for key in ("rate", "item"):
            if not invoice.has_option("invoice", key):
                invoice.set("invoice", key, "")

        # Store the file.
        self.ensure_dir(filename)
        with open(filename, "w") as handle:
            invoice.write(handle)
        return filename
def run():
    """Refresh the get-iplayer cache, then build every configured podcast feed."""
    # print a warning about copyright
    print("WARNING: Do not use the script to produce public podcasts, it is for personal use only.")
    print("If you publically serve programmes you may be in violation of the BBC's copyright.")

    # load the config file
    global master_config
    master_config = RawConfigParser()
    master_config.read(config_directory + MASTER_CONFIG_FILENAME)

    # set the get-iplayer path (falls back to the module-level default
    # when the config has no override)
    global get_iplayer_path
    if master_config.has_option("General", "get-iplayer_path"):
        get_iplayer_path = master_config.get("General", "get-iplayer_path")

    # refresh the get-iplayer cache
    print("Refreshing get-iplayer... (this may take some time)")
    subprocess.check_output([get_iplayer_path, "--type=all", "--quiet"])

    # scan for feed config files and process each; only the first directory
    # yielded by os.walk is processed before returning
    for root, directories, files in os.walk(config_directory + FEED_CONFIG_DIRECTORY):
        for filename in files:
            load_feed(filename)
        print("Finished.")
        return  # stop here, we have processed the feeds

    # if we have not returned at this point, then no config directory was
    # found, this is a problem
    print("No config directory found")
def _getExtendedConfs(self, conf_filename, conf_data, confs=None):
    """Return a list of tuple (conf_name, parser, encoding_errors).

    :param conf_filename: The path and name of the conf file.
    :param conf_data: Unparsed config data.
    :param confs: A list of confs that extend filename.
    :return: A list of confs ordered from extender to extendee.
    :raises IOError: If filename cannot be read.

    This method parses the config data and checks for encoding errors.
    It checks parsed config data for the extends key in the meta section.
    It reads the unparsed config_data from the extended filename.
    It passes filename, data, and the working list to itself.
    """
    if confs is None:
        confs = []
    encoding_errors = self._verifyEncoding(conf_data)
    parser = RawConfigParser()
    # NOTE(review): readfp() is deprecated (removed in Python 3.12);
    # read_file() is the modern equivalent — left as-is, presumably for
    # Python 2 compatibility.
    parser.readfp(StringIO(conf_data), conf_filename)
    confs.append((conf_filename, parser, encoding_errors))
    # Recurse into the extended config so the returned list runs from
    # extender to extendee.
    if parser.has_option('meta', 'extends'):
        base_path = dirname(conf_filename)
        extends_name = parser.get('meta', 'extends')
        extends_filename = abspath('%s/%s' % (base_path, extends_name))
        extends_data = read_content(extends_filename)
        self._getExtendedConfs(extends_filename, extends_data, confs)
    return confs
def load_jenkins_config(self):
    # Restore locally cached Jenkins state: connection configs, cached job
    # data files, and report configs listed in the master conf file.
    # Does nothing when the conf file is absent.
    if os.path.exists(jenkins_conf_file):
        config_parser = RawConfigParser()
        config_parser.read(jenkins_conf_file)
        if config_parser.has_section(SECTION_TITLE):
            # NOTE(review): 'connections' is read without a has_option
            # guard — a section missing that key raises NoOptionError.
            connection_names = config_parser.get(SECTION_TITLE, 'connections').split(',')
            # Load cached Jenkins connection configs from conf_dir & create empty Jenkins connections
            for connection_name in connection_names:
                JenkinsConnection.load_connection_config(self, connection_name)
            # Load cached job data from jenkins_data_dir
            for file_name in os.listdir(jenkins_data_dir):
                data_file = os.path.join(jenkins_data_dir, file_name)
                print('Loading locally cached Jenkins job data from file: {}'.format(data_file))
                jenkins_connection = self.load_job_data(data_file)
                if jenkins_connection is not None:
                    # Index the connection by name for later lookups.
                    self.jenkins_connections[jenkins_connection.name] = jenkins_connection
            # Reports are optional.
            if config_parser.has_option(SECTION_TITLE, 'reports'):
                report_names = config_parser.get(SECTION_TITLE, 'reports').split(',')
                for report_name in report_names:
                    JenkinsReport.load_report_config(self, report_name)
class FactorioLocale:
    """Accumulates Factorio locale entries read from utf-8-sig ini files.

    ``conf`` holds trusted translations; ``crap`` holds entries whose value
    contains ``__`` (placeholder markup) so they only win as a last resort.
    """

    def __init__(self):
        self.conf = RawConfigParser()   # trusted translations
        self.crap = RawConfigParser()   # entries containing '__' placeholders

    def get_name(self, section, name):
        """Return the stored translation, or a '#section#name#' marker."""
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        """Parse one locale file and merge its entries."""
        parsed = RawConfigParser()
        # utf-8-sig per https://bugs.python.org/issue7185#msg94346
        with open(csv, encoding='utf-8-sig') as handle:
            parsed.read_file(handle)
        for sec in parsed.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)
            for key, val in parsed.items(sec):
                if '__' in val:
                    # Placeholder-bearing value: quarantine it.
                    if self.crap.has_option(sec, key):
                        print('Overwriting crap locale %s (%r -> %r)' % (key, self.crap.get(sec, key), val))
                    self.crap.set(sec, key, val)
                else:
                    if self.conf.has_option(sec, key) and self.conf.get(sec, key).lower() != val.lower():
                        print('Overwriting locale %s (%r -> %r)' % (key, self.conf.get(sec, key), val))
                    self.conf.set(sec, key, val)

    def merge(self):
        """Copy quarantined entries into conf where no trusted entry exists."""
        for sec in self.crap.sections():
            for key, val in self.crap.items(sec):
                if not self.conf.has_option(sec, key):
                    print('Using crap locale %s (%r)' % (key, val))
                    self.conf.set(sec, key, val)

    def save(self, out):
        """Write the merged locale to *out* as an ini file."""
        with open(out, 'w') as handle:
            self.conf.write(handle)
def _read_pypirc(self):
    """Reads the .pypirc file.

    Returns the server entry matching self.repository as a dict with keys
    server/username/repository/realm/password, or {} when no config exists
    or nothing matches.  Handles both the [distutils]/index-servers layout
    and the legacy [server-login] format.
    """
    rc = self._get_rc_file()
    if os.path.exists(rc):
        self.announce('Using PyPI login from %s' % rc)
        repository = self.repository or self.DEFAULT_REPOSITORY
        # NOTE(review): `realm` is computed but never used below.
        realm = self.realm or self.DEFAULT_REALM
        config = RawConfigParser()
        config.read(rc)
        sections = config.sections()
        if 'distutils' in sections:
            # Collect the declared server names, one per line.
            index_servers = config.get('distutils', 'index-servers')
            _servers = [server.strip() for server in
                        index_servers.split('\n')
                        if server.strip() != '']
            if _servers == []:
                # Nothing declared: fall back to a [pypi] section if present.
                if 'pypi' in sections:
                    _servers = ['pypi']
                else:
                    # Improperly defined file: return an empty dict.
                    return {}
            for server in _servers:
                current = {'server': server}
                current['username'] = config.get(server, 'username')
                # Optional per-server settings with their defaults.
                for key, default in (('repository', self.DEFAULT_REPOSITORY),
                                     ('realm', self.DEFAULT_REALM),
                                     ('password', None)):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default
                # The canonical 'pypi' entry matches the default repository.
                if server == 'pypi' and repository in (self.DEFAULT_REPOSITORY, 'pypi'):
                    current['repository'] = self.DEFAULT_REPOSITORY
                    return current
                # Otherwise match by section name or repository URL.
                if (current['server'] == repository or
                        current['repository'] == repository):
                    return current
        elif 'server-login' in sections:
            # Legacy single-server format.
            server = 'server-login'
            if config.has_option(server, 'repository'):
                repository = config.get(server, 'repository')
            else:
                repository = self.DEFAULT_REPOSITORY
            return {'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM}
    return {}
def load_config(self):
    """Populate account/system attributes from self.CONFIG, if it exists."""
    if not os.path.exists(self.CONFIG):
        logger.debug("Unregistered installation")
        self.cp_url = self.CP_URL
        return

    logger.debug("Loading config %s", self.CONFIG)
    config = RawConfigParser()
    config.read(self.CONFIG)

    # Read account settings
    if config.has_section("account"):
        if config.has_option("account", "cp_url"):
            self.cp_url = config.get("account", "cp_url")
        else:
            self.cp_url = self.CP_URL
        for option, attr in (("uuid", "account_uuid"),
                             ("name", "account_name"),
                             ("password", "account_password")):
            if config.has_option("account", option):
                setattr(self, attr, config.get("account", option))

    # Read system settings
    if config.has_section("system"):
        for option, attr in (("uuid", "system_uuid"),
                             ("name", "system_name"),
                             ("type", "system_type")):
            if config.has_option("system", option):
                setattr(self, attr, config.get("system", option))
def parse_cmake_module(s_in):
    """Extract the autocmake.cfg configuration embedded in a CMake module.

    Returns (docopt, define, export, fetch); each element is None when the
    module does not document that option, or when there is no
    ``autocmake.cfg configuration::`` block at all.
    """
    config_docopt = None
    config_define = None
    config_export = None
    config_fetch = None

    if 'autocmake.cfg configuration::' not in s_in:
        return config_docopt, config_define, config_export, config_fetch

    s_out = []
    is_rst_line = False
    for line in s_in.split('\n'):
        if is_rst_line:
            # Stay in rst mode only while the comment block continues.
            if len(line) > 0:
                if line[0] != '#':
                    is_rst_line = False
            else:
                is_rst_line = False
        if is_rst_line:
            s_out.append(line[2:])  # strip the leading "# "
        if '#.rst:' in line:
            is_rst_line = True

    autocmake_entry = '\n'.join(s_out).split('autocmake.cfg configuration::')[1]
    autocmake_entry = autocmake_entry.replace('\n ', '\n')

    # we prepend a fake section heading so that we can parse it with configparser
    autocmake_entry = '[foo]\n' + autocmake_entry

    config = RawConfigParser(dict_type=OrderedDict)
    # FIX: readfp() was removed in Python 3.12; read_file() is the
    # drop-in replacement (available since 3.2).
    config.read_file(StringIO(autocmake_entry))

    for section in config.sections():
        if config.has_option(section, 'docopt'):
            config_docopt = config.get(section, 'docopt')
        if config.has_option(section, 'define'):
            config_define = config.get(section, 'define')
        if config.has_option(section, 'export'):
            config_export = config.get(section, 'export')
        if config.has_option(section, 'fetch'):
            config_fetch = config.get(section, 'fetch')
    return config_docopt, config_define, config_export, config_fetch
def test_dumped_values_in_tracini_sample(self):
    """The dumped trac.ini.sample file must contain the expected defaults."""
    sample = self.env.config.filename + '.sample'
    cfg = RawConfigParser()
    # read() returns the list of files that were successfully parsed.
    self.assertEqual([sample], cfg.read(sample, 'utf-8'))
    self.assertEqual('#cc0,#0c0,#0cc,#00c,#c0c,#c00',
                     cfg.get('revisionlog', 'graph_colors'))
    self.assertEqual('disabled', cfg.get('trac', 'secure_cookies'))
    self.assertTrue(cfg.has_option('logging', 'log_format'))
    self.assertEqual('', cfg.get('logging', 'log_format'))
def get_config(
    config_file: configparser.RawConfigParser,
    section: str,
    key: str,
    default_value: str = "",
) -> str:
    """Look up *key* in *section* of *config_file*.

    Returns *default_value* when the section or option is absent.
    """
    if not config_file.has_option(section, key):
        return default_value
    return config_file.get(section, key)
def _check_submodule_no_git(self):
    """
    Like ``_check_submodule_using_git``, but simply parses the .gitmodules file
    to determine if the supplied path is a git submodule, and does not exec any
    subprocesses.

    This can only determine if a path is a submodule--it does not perform
    updates, etc.  This function may need to be updated if the format of the
    .gitmodules file is changed between git versions.
    """
    gitmodules_path = os.path.abspath('.gitmodules')

    if not os.path.isfile(gitmodules_path):
        return False

    # This is a minimal reader for gitconfig-style files.  It handles a few of
    # the quirks that make gitconfig files incompatible with ConfigParser-style
    # files, but does not support the full gitconfig syntax (just enough
    # needed to read a .gitmodules file).
    gitmodules_fileobj = io.StringIO()

    # Must use io.open for cross-Python-compatible behavior wrt unicode
    with io.open(gitmodules_path) as f:
        for line in f:
            # gitconfig files are more flexible with leading whitespace; just
            # go ahead and remove it
            line = line.lstrip()

            # comments can start with either # or ;
            # (fixed: the previous check tested ':' instead of '#', so '#'
            # comment lines leaked into the buffer and lines starting with
            # ':' were wrongly dropped)
            if line and line[0] in ('#', ';'):
                continue

            gitmodules_fileobj.write(line)

    gitmodules_fileobj.seek(0)

    cfg = RawConfigParser()

    try:
        # read_file() replaces readfp(), removed in Python 3.12
        cfg.read_file(gitmodules_fileobj)
    except Exception as exc:
        log.warn('Malformatted .gitmodules file: {0}\n'
                 '{1} cannot be assumed to be a git submodule.'.format(
                     exc, self.path))
        return False

    for section in cfg.sections():
        if not cfg.has_option(section, 'path'):
            continue
        # Compare with trailing separators stripped on both sides.
        submodule_path = cfg.get(section, 'path').rstrip(os.sep)
        if submodule_path == self.path.rstrip(os.sep):
            return True

    return False
class CertificateAuthorityConfig(object):
    """ Attempt to parse CA-s from openssl.cnf """

    def __init__(self, *args):
        """Parse one or more openssl.cnf-style files.

        A synthetic [global] section is prepended so directives appearing
        before the first section header remain accessible.
        """
        self._config = RawConfigParser()
        for arg in args:
            # Use a context manager so the handle is closed deterministically
            # (previously leaked); read_file() replaces readfp(), which was
            # removed in Python 3.12.
            with open(os.path.expanduser(arg)) as fileobj:
                self._config.read_file(itertools.chain(["[global]"], fileobj))

    def get(self, section, key, default=""):
        """Return the option value, or *default* when it is not set."""
        if self._config.has_option(section, key):
            return self._config.get(section, key)
        else:
            return default

    def instantiate_authority(self, slug):
        """Build a CertificateAuthority from the CA_<slug> section."""
        section = "CA_" + slug

        dirs = dict([(key, self.get(section, key))
                     for key in ("dir", "certificate", "crl", "certs",
                                 "new_certs_dir", "private_key",
                                 "revoked_certs_dir", "autosign_whitelist")])

        # Variable expansion, eg $dir
        for key, value in dirs.items():
            if "$" in value:
                dirs[key] = re.sub(r'\$([a-z]+)', lambda m: dirs[m.groups()[0]], value)

        dirs.pop("dir")
        dirs["email_address"] = self.get(section, "emailAddress")
        dirs["inbox"] = self.get(section, "inbox")
        dirs["outbox"] = self.get(section, "outbox")
        dirs["lifetime"] = int(self.get(section, "default_days", "1825"))

        extensions_section = self.get(section, "x509_extensions")
        if extensions_section:
            dirs["basic_constraints"] = self.get(extensions_section, "basicConstraints")
            dirs["key_usage"] = self.get(extensions_section, "keyUsage")
            dirs["extended_key_usage"] = self.get(extensions_section, "extendedKeyUsage")

        authority = CertificateAuthority(slug, **dirs)
        return authority

    def all_authorities(self):
        """Yield a CertificateAuthority for every CA_* section found."""
        # Iterating the parser yields section names (incl. DEFAULT).
        for section in self._config:
            if section.startswith("CA_"):
                try:
                    yield self.instantiate_authority(section[3:])
                except FileNotFoundError:
                    pass

    def pop_certificate_authority(self):
        """Decorator factory: replace the ``ca`` slug kwarg with an authority."""
        def wrapper(func):
            def wrapped(*args, **kwargs):
                slug = kwargs.pop("ca")
                kwargs["ca"] = self.instantiate_authority(slug)
                return func(*args, **kwargs)
            return wrapped
        return wrapper
def _find_default_profile(firefox_dir):
    """Try to find default/useful profile in firefox located in `firefox_dir`

    Checks the [Install*] sections of profiles.ini first; falls back to the
    [Profile*] section flagged Default=1, then to any profile with a Path.
    Returns the absolute profile path, or None.
    """
    # Sections lacking "Default" fall back to 0 via the parser defaults.
    config = RawConfigParser({"Default": 0})
    config.read(os.path.join(firefox_dir, "profiles.ini"))
    path = None

    # find Install* section and default profile
    for section in config.sections():
        if section.startswith("Install"):
            if not config.has_option(section, "Default"):
                continue
            # found default profile
            path = make_absolute_and_check(firefox_dir, config.get(section, "Default"))
            if path:
                pretty.print_debug(__name__, "found install default profile", path)
                return path
            # only the first Install section with a Default is considered
            break

    # fixed: pass __name__ first, like every other print_debug call here
    pretty.print_debug(__name__, "Install* default profile not found")

    # not found default profile, iterate profiles, try to find default
    for section in config.sections():
        if not section.startswith("Profile"):
            continue
        if config.has_option(section, "Default") and \
                config.get(section, "Default") == "1":
            path = make_absolute_and_check(firefox_dir, config.get(section, "Path"))
            if path:
                pretty.print_debug(__name__, "Found profile with default=1",
                                   section, path)
                break
        if not path and config.has_option(section, "Path"):
            path = make_absolute_and_check(firefox_dir, config.get(section, "Path"))

    return path
def get_config(self, path):
    """Read entry from configuration."""
    # "section.option" addressing into the repository's .hg/hgrc file.
    section, option = path.split(".", 1)
    hgrc = os.path.join(self.path, ".hg", "hgrc")
    parser = RawConfigParser()
    parser.read(hgrc)
    if parser.has_option(section, option):
        return parser.get(section, option)
    return None
def get_firefox_profiles(self, directory):
    """ List all profiles """
    found = []
    ini_path = os.path.join(directory, 'profiles.ini')

    if not os.path.isfile(ini_path):
        # No profiles.ini: look for cookie databases in subdirectories.
        for entry in os.listdir(directory):
            candidate = os.path.join(directory, entry, 'cookies.sqlite')
            if os.path.isfile(candidate):
                found.append(candidate)
        return list(set(found))

    parser = RawConfigParser()
    try:
        parser.read(ini_path)
        for section in parser.sections():
            if not (section.startswith('Profile')
                    and parser.has_option(section, 'Path')):
                continue
            raw_path = parser.get(section, 'Path').strip()
            resolved = None
            if parser.has_option(section, 'IsRelative'):
                flag = parser.get(section, 'IsRelative')
                if flag == '1':
                    resolved = os.path.join(directory, raw_path)
                elif flag == '0':
                    resolved = raw_path
            else:
                # No "IsRelative" in profiles.ini
                resolved = os.path.join(directory, raw_path)
            if resolved:
                found.append(resolved.replace('/', '\\'))
    except Exception as e:
        log.error(f'An error occurred while reading profiles.ini: {e}')
    return list(set(found))
def load_flake8_config(filename, global_config=False, project_config=False):
    """ Returns flake8 settings from config file.
    More info: http://flake8.readthedocs.org/en/latest/config.html
    """
    parser = RawConfigParser()

    # check global config
    if global_config and os.path.isfile(DEFAULT_CONFIG_FILE):
        parser.read(DEFAULT_CONFIG_FILE)

    # search config in filename dir and all parent dirs
    if project_config:
        parent = tail = os.path.abspath(filename)
        while tail:
            if parser.read([os.path.join(parent, fn) for fn in CONFIG_FILES]):
                break
            parent, tail = os.path.split(parent)

    result = {}
    if not parser.has_section('flake8'):
        return result

    # (flake8 option, plugin setting, value type)
    option_specs = (
        ('ignore', 'ignore', 'list'),
        ('select', 'select', 'list'),
        ('exclude', 'ignore_files', 'list'),
        ('max_line_length', 'pep8_max_line_length', 'int'),
    )
    for config, plugin, option_type in option_specs:
        # fall back to the dashed spelling when the underscored one is absent
        if not parser.has_option('flake8', config):
            config = config.replace('_', '-')
        if not parser.has_option('flake8', config):
            continue
        raw_value = parser.get('flake8', config).strip()
        if not raw_value:
            continue
        if option_type == 'list':
            result[plugin] = raw_value.split(',')
        elif option_type == 'int':
            result[plugin] = parser.getint('flake8', config)
    return result
def __init__(self, pypirc, repository, cache, dryrun):
    """Resolve the repository URL for *repository* from the .pypirc file.

    Falls back to the legacy PyPI URL when the repository section does not
    define one explicitly.
    """
    self.pypirc = pypirc
    self.repository = repository
    self.cache = cache
    self.dryrun = dryrun

    parser = RawConfigParser()
    parser.read(pypirc)
    if parser.has_option(repository, 'repository'):
        self.repository_url = parser.get(repository, 'repository')
    else:
        # this is the legacy pypi url that we use in the cache keys
        self.repository_url = 'https://pypi.python.org/pypi'
def get_addressbook_dirs():
    ''' Get path to addressbook file from default profile. '''
    for thome, tprofile in THUNDERBIRD_PROFILES:
        if not os.path.isfile(tprofile):
            continue
        config = RawConfigParser()
        config.read(tprofile)
        for section in config.sections():
            if not config.has_option(section, "Path"):
                continue
            path = config.get(section, "Path")
            # relative profile paths are resolved against the profile home
            if not os.path.isabs(path):
                path = os.path.join(thome, path)
            if os.path.isdir(path):
                yield path
def get_firefox_home_file(needed_file):
    """Return the path of *needed_file* inside the default Firefox profile.

    Returns None when ~/.mozilla/firefox does not exist, and "" when no
    profile could be determined from profiles.ini.
    """
    firefox_dir = os.path.expanduser("~/.mozilla/firefox")
    if not os.path.exists(firefox_dir):
        return None

    # Sections lacking "Default" fall back to the int 0 via parser defaults,
    # so the == "1" comparison below is False for them.
    config = RawConfigParser({"Default": 0})
    config.read(os.path.join(firefox_dir, "profiles.ini"))

    path = None
    for section in config.sections():
        if config.has_option(section, "Default") and config.get(section, "Default") == "1":
            path = config.get(section, "Path")
            break
        elif path is None and config.has_option(section, "Path"):
            # remember the first profile seen as a fallback
            path = config.get(section, "Path")

    if path is None:
        return ""

    if path.startswith("/"):
        return os.path.join(path, needed_file)

    return os.path.join(firefox_dir, path, needed_file)
class Config(object):
    """Thin wrapper around ConfigParser adding a defaulting get() and
    create-on-demand set()."""

    def __init__(self, config_file):
        self.config_parser = ConfigParser()
        # Delegate common helpers straight to the underlying parser.
        # NOTE: has_option was previously bound here as well, which silently
        # shadowed the identical has_option() method below; the method is
        # now the single implementation.
        self.remove_option = self.config_parser.remove_option
        self.add_section = self.config_parser.add_section
        self.getboolean = self.config_parser.getboolean
        self.getint = self.config_parser.getint
        self.getfloat = self.config_parser.getfloat
        self.options = self.config_parser.options
        self.items = self.config_parser.items
        self.config_file = config_file

    def load(self):
        """Read the configuration file from disk."""
        self.config_parser.read(self.config_file)

    def has_option(self, section, option):
        return self.config_parser.has_option(section, option)

    def get(self, section, option, default=None, debug=False):
        """Return the option value, or *default* on any lookup error."""
        try:
            return self.config_parser.get(section, option)
        except Exception as e:
            if debug:
                print("function get got error: %s" % (e))
                traceback.print_exc(file=sys.stdout)
            return default

    def set(self, section, option, value, debug=False):
        """Set an option, creating the section on demand."""
        if not self.config_parser.has_section(section):
            if debug:
                print("Section \"%s\" not exist. create..." % (section))
            self.add_section(section)
        self.config_parser.set(section, option, value)

    def write(self, given_filepath=None):
        """Persist the configuration to *given_filepath* or the default file."""
        # Context manager closes the handle even if write() raises
        # (previously the handle leaked on error).
        with open(given_filepath or self.config_file, "w") as f:
            self.config_parser.write(f)
def load_configuration(filename):
    """Build a Configuration from the [ss] section of *filename*."""
    parser = RawConfigParser()
    parser.add_section('ss')
    parser.read(filename)

    config = Configuration()

    def apply_option(option, getter):
        # copy the option onto config only when the file defines it
        if parser.has_option('ss', option):
            setattr(config, option, getattr(parser, getter)('ss', option))

    apply_option('recursive', 'getboolean')
    apply_option('skip', 'getboolean')
    apply_option('mkv', 'getboolean')
    apply_option('parallel_jobs', 'getint')

    if parser.has_option('ss', 'languages'):
        raw = parser.get('ss', 'languages')
        config.languages = [x.strip() for x in raw.split(',')]

    return config
def load(self):
    """Load configurations from file."""
    parser = RawConfigParser()
    parser.optionxform = str
    parser.read(os.path.join(app_config_dir, 'config'))

    # Every option lives in [Options] and is applied only when present.
    boolean_options = (
        'autostart',
        'stoptimer',
        'show_notifications',
        'show_update_notifications',
        'feeds_at_top',
        'show_unread_feeds',
    )
    integer_options = (
        'refreshtime',
        'items_per_feed',
    )
    for option in boolean_options:
        if parser.has_option('Options', option):
            setattr(self, option, parser.getboolean('Options', option))
    for option in integer_options:
        if parser.has_option('Options', option):
            setattr(self, option, parser.getint('Options', option))
def __parse_configurations(self, __confpath, __isastream):
    '''Parse the different configurations.

    __confpath may be a directory (every *.conf file inside is parsed) or a
    single configuration file.  Parsed configurations are stored in
    self.__configs keyed by their [main] name; any error exits the process.
    '''
    try:
        # check if the path to the confs is a directory or a file
        if os.path.isdir(__confpath):
            __confs = [__file for __file in os.listdir(__confpath)
                       if __file.endswith('.conf')]
        else:
            __confpath, __conft = os.path.split(__confpath)
            __confs = [__conft]
        # check if at least one configuration file is availabe
        if not __confs:
            __errmsg = 'Could not find any .conf file in {}'
            print(__errmsg.format(__confpath))
            sys.exit(1)
        # parse the configuration files
        for __conf in __confs:
            __currentconf = {}
            __config = RawConfigParser()
            __fullconfpath = os.path.join('/'.join([__confpath, __conf]))
            try:
                with open(__fullconfpath, 'r') as __file:
                    # strip GPG/PGP header and footer if it is a signed file
                    __stripres = self.strip_gpg_header(__file, __fullconfpath)
                    __config.read_string(__stripres)
            except UnicodeDecodeError as __err:
                __msg = 'Error while parsing the configuration file {}:'.format(__fullconfpath)
                print(__msg)
                print(__err)
                sys.exit(1)
            # Common information for the backups
            # The name of the backup
            __currentconf['name'] = __config.get('main', 'name')
            ### The type of the backups
            __currentconf['type'] = __config.get('main', 'type')
            # Common information for the archives
            ### The archive path
            __confsettings = [{'main': 'path'},
                              ### The list of the expected files in the archive
                              {'main': 'files_list'},
                              ### The delimiter to use in the list of files
                              {'main': 'delimiter'},
                              ### The hash sum to identify the list of files
                              {'main': 'sha512'}
                              ]
            for __element in __confsettings:
                __key, __value = __element.popitem()
                if __config.has_option(__key, __value):
                    __currentconf[__value] = __config.get(
                        __key, __value)
                else:
                    # NOTE(review): RawConfigParser.set() returns None, so a
                    # missing option is stored here as None rather than '' —
                    # downstream truthiness checks treat both the same, but
                    # confirm this is intended.
                    __currentconf[__value] = __config.set(
                        __key, __value, '')
            # Checking the information
            ### Check the paths in the configuration
            __confkeys = ('path', 'files_list')
            for __confkey in __confkeys:
                # for a stream, 'path' is not checked at all
                if __confkey == 'path' and __isastream:
                    break
                else:
                    __path = __currentconf[__confkey]
                    if not __path:
                        print('A path is missing in {}.'.format(__config.get('main', 'name')))
                        sys.exit(1)
                    # resolve relative paths against the configuration dir
                    if not os.path.isabs(__path):
                        __path = os.path.normpath(os.path.join(os.path.abspath(__confpath), __path))
                        __currentconf[__confkey] = __path
                    # placeholder should be here
                    plh = PlaceHolder(__currentconf[__confkey])
                    __currentconf[__confkey] = plh.realpath
                    # test if the path exists
                    if not os.path.exists(__currentconf[__confkey]):
                        print('{} does not exist.'.format(__path))
                        sys.exit(1)
            # If the backup type is archive, path must not be a directory
            if not __isastream and __currentconf['type'] == 'archive' and os.path.isdir(__currentconf['path']):
                __errmsg = '{} is a directory but appears as an archive in configuration {}.'
                print(__errmsg.format(__currentconf['path'], __config.get('main', 'name')))
                sys.exit(1)
            # check if the name of the conf does not exist yet
            if __config.get('main', 'name') in self.__configs:
                __errmsg = 'The configuration name in {} already exists. Please rename it.'
                print(__errmsg.format(__fullconfpath))
                sys.exit(1)
            else:
                self.__configs[__config.get('main', 'name')] = __currentconf
    except (ParsingError, NoSectionError, NoOptionError, OSError, IOError) as __err:
        print(__err)
        sys.exit(1)
class CertificateAuthorityConfig(object):
    """ Attempt to parse CA-s from openssl.cnf """

    def __init__(self, *args):
        """Parse one or more openssl.cnf-style files.

        A synthetic [global] section is prepended so directives appearing
        before the first section header remain accessible.
        """
        self._config = RawConfigParser()
        for arg in args:
            # Use a context manager so the handle is closed deterministically
            # (previously leaked); read_file() replaces readfp(), which was
            # removed in Python 3.12.
            with open(os.path.expanduser(arg)) as fileobj:
                self._config.read_file(itertools.chain(["[global]"], fileobj))

    def get(self, section, key, default=""):
        """Return the option value, or *default* when it is not set."""
        if self._config.has_option(section, key):
            return self._config.get(section, key)
        else:
            return default

    def instantiate_authority(self, slug):
        """Build a CertificateAuthority from the CA_<slug> section."""
        section = "CA_" + slug

        dirs = dict(
            [
                (key, self.get(section, key))
                for key in (
                    "dir",
                    "certificate",
                    "crl",
                    "certs",
                    "new_certs_dir",
                    "private_key",
                    "revoked_certs_dir",
                    "autosign_whitelist",
                )
            ]
        )

        # Variable expansion, eg $dir
        for key, value in dirs.items():
            if "$" in value:
                dirs[key] = re.sub(r"\$([a-z]+)", lambda m: dirs[m.groups()[0]], value)

        dirs.pop("dir")
        dirs["email_address"] = self.get(section, "emailAddress")
        dirs["inbox"] = self.get(section, "inbox")
        dirs["outbox"] = self.get(section, "outbox")
        dirs["lifetime"] = int(self.get(section, "default_days", "1825"))

        extensions_section = self.get(section, "x509_extensions")
        if extensions_section:
            dirs["basic_constraints"] = self.get(extensions_section, "basicConstraints")
            dirs["key_usage"] = self.get(extensions_section, "keyUsage")
            dirs["extended_key_usage"] = self.get(extensions_section, "extendedKeyUsage")

        authority = CertificateAuthority(slug, **dirs)
        return authority

    def all_authorities(self):
        """Yield a CertificateAuthority for every CA_* section found."""
        # Iterating the parser yields section names (incl. DEFAULT).
        for section in self._config:
            if section.startswith("CA_"):
                try:
                    yield self.instantiate_authority(section[3:])
                except FileNotFoundError:
                    pass

    def pop_certificate_authority(self):
        """Decorator factory: replace the ``ca`` slug kwarg with an authority."""
        def wrapper(func):
            def wrapped(*args, **kwargs):
                slug = kwargs.pop("ca")
                kwargs["ca"] = self.instantiate_authority(slug)
                return func(*args, **kwargs)
            return wrapped
        return wrapper
def _read_pypirc(self):
    """Reads the .pypirc file.

    Returns a dict describing the server matching self.repository
    (keys: server, username, repository, realm, password), or an empty
    dict when no .pypirc exists or no matching server is found.  Supports
    both the modern [distutils]/index-servers layout and the legacy
    [server-login] single-section format.
    """
    rc = self._get_rc_file()
    if os.path.exists(rc):
        self.announce('Using PyPI login from %s' % rc)
        repository = self.repository or self.DEFAULT_REPOSITORY
        # NOTE(review): `realm` is computed but never used below — the old
        # format branch returns DEFAULT_REALM directly; confirm whether this
        # is intentional.
        realm = self.realm or self.DEFAULT_REALM

        config = RawConfigParser()
        config.read(rc)
        sections = config.sections()
        if 'distutils' in sections:
            # let's get the list of servers
            index_servers = config.get('distutils', 'index-servers')
            _servers = [server.strip() for server in
                        index_servers.split('\n')
                        if server.strip() != '']
            if _servers == []:
                # nothing set, let's try to get the default pypi
                if 'pypi' in sections:
                    _servers = ['pypi']
                else:
                    # the file is not properly defined, returning
                    # an empty dict
                    return {}
            for server in _servers:
                current = {'server': server}
                current['username'] = config.get(server, 'username')

                # optional params
                for key, default in (('repository',
                                      self.DEFAULT_REPOSITORY),
                                     ('realm', self.DEFAULT_REALM),
                                     ('password', None)):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default

                # work around people having "repository" for the "pypi"
                # section of their config set to the HTTP (rather than
                # HTTPS) URL
                if (server == 'pypi' and
                        repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                    current['repository'] = self.DEFAULT_REPOSITORY
                    return current

                if (current['server'] == repository or
                        current['repository'] == repository):
                    return current
        elif 'server-login' in sections:
            # old format
            server = 'server-login'
            if config.has_option(server, 'repository'):
                repository = config.get(server, 'repository')
            else:
                repository = self.DEFAULT_REPOSITORY
            return {'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM}

    return {}
class Config(object):
    """A wrapper around RawConfigParser.

    Provides a ``defaults`` attribute of the same type which can be used
    to set default values.  Most getters fall back to ``defaults`` when
    the option is missing, and accept an optional ``default`` argument
    that short-circuits the error handling entirely.
    """

    def __init__(self, version=None, _defaults=True):
        """Use read() to read in an existing config file.

        version should be an int starting with 0 that gets incremented if you
        want to register a new upgrade function. If None, upgrade is disabled.
        """
        self._config = ConfigParser(dict_type=_sorted_dict)
        self.defaults = None
        if _defaults:
            # the defaults object is itself a Config, minus its own defaults
            self.defaults = Config(_defaults=False)
        self._version = version
        self._loaded_version = None
        self._upgrade_funcs = []

    def _do_upgrade(self, func):
        # Run one registered upgrade function if versions differ.
        assert self._loaded_version is not None
        assert self._version is not None

        old_version = self._loaded_version
        new_version = self._version
        if old_version != new_version:
            print_d("Config upgrade: %d->%d (%r)" % (
                old_version, new_version, func))
            func(self, old_version, new_version)

    def get_version(self):
        """Get the version of the loaded config file (for testing only)

        Raises Error if no file was loaded or versioning is disabled.
        """
        if self._version is None:
            raise Error("Versioning disabled")

        if self._loaded_version is None:
            raise Error("No file loaded")

        return self._loaded_version

    def register_upgrade_function(self, function):
        """Register an upgrade function that gets called at each read()
        if the current config version and the loaded version don't match.

        Can also be registered after read was called.

        function(config, old_version: int, new_version: int) -> None
        """
        if self._version is None:
            raise Error("Versioning disabled")

        self._upgrade_funcs.append(function)
        # after read(), so upgrade now
        if self._loaded_version is not None:
            self._do_upgrade(function)
        return function

    def reset(self, section, option):
        """Reset the value to the default state"""
        assert self.defaults is not None

        try:
            self._config.remove_option(section, option)
        except NoSectionError:
            pass

    def options(self, section):
        """Returns a list of options available in the specified section."""
        try:
            options = self._config.options(section)
        except NoSectionError:
            if self.defaults:
                return self.defaults.options(section)
            raise
        else:
            if self.defaults:
                try:
                    # merge in default-only options, deduplicated
                    options.extend(self.defaults.options(section))
                    options = list_unique(options)
                except NoSectionError:
                    pass
            return options

    def get(self, section, option, default=_DEFAULT):
        """get(section, option[, default]) -> str

        If default is not given or set, raises Error in case of an error
        """
        try:
            return self._config.get(section, option)
        except Error:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.get(section, option)
                    except Error:
                        pass
                raise
            return default

    def gettext(self, *args, **kwargs):
        # Like get(), but validates the value is encodable text.
        value = self.get(*args, **kwargs)
        # make sure there are no surrogates
        value.encode("utf-8")
        return value

    def getbytes(self, section, option, default=_DEFAULT):
        # Stored text round-trips to bytes via surrogateescape.
        try:
            value = self._config.get(section, option)
            value = value.encode("utf-8", "surrogateescape")
            return value
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getbytes(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getboolean(self, section, option, default=_DEFAULT):
        """getboolean(section, option[, default]) -> bool

        If default is not given or set, raises Error in case of an error
        """
        try:
            return self._config.getboolean(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getboolean(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getint(self, section, option, default=_DEFAULT):
        """getint(section, option[, default]) -> int

        If default is not give or set, raises Error in case of an error
        """
        try:
            # via getfloat so values stored as floats still parse as ints
            return int(self._config.getfloat(section, option))
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getint(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getfloat(self, section, option, default=_DEFAULT):
        """getfloat(section, option[, default]) -> float

        If default is not give or set, raises Error in case of an error
        """
        try:
            return self._config.getfloat(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getfloat(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getstringlist(self, section, option, default=_DEFAULT):
        """getstringlist(section, option[, default]) -> list

        If default is not given or set, raises Error in case of an error.

        Gets a list of strings, using CSV to parse and delimit.
        """
        try:
            value = self._config.get(section, option)
            parser = csv.reader(
                [value], lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
            try:
                vals = next(parser)
            except (csv.Error, ValueError) as e:
                raise Error(e)
            return vals
        except Error as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getstringlist(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def setstringlist(self, section, option, values):
        """Saves a list of unicode strings using the csv module"""
        sw = StringIO()
        values = [str(v) for v in values]
        writer = csv.writer(sw, lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
        writer.writerow(values)
        self.set(section, option, sw.getvalue())

    def setlist(self, section, option, values, sep=","):
        """Saves a list of str using ',' as a separator and \\ for escaping"""
        values = [str(v) for v in values]
        joined = join_escape(values, sep)
        self.set(section, option, joined)

    def getlist(self, section, option, default=_DEFAULT, sep=","):
        """Returns a str list saved with setlist()"""
        try:
            value = self._config.get(section, option)
            return split_escape(value, sep)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getlist(section, option, sep=sep)
                    except Error:
                        pass
                raise Error(e)
            return default

    def set(self, section, option, value):
        """Saves the string representation for the passed value

        Don't pass unicode, encode first.
        """
        if isinstance(value, bytes):
            raise TypeError("use setbytes")

        # RawConfigParser only allows string values but doesn't
        # scream if they are not (and it only fails before the
        # first config save..)
        if not isinstance(value, str):
            value = str(value)

        try:
            self._config.set(section, option, value)
        except NoSectionError:
            # auto-create sections known to the defaults
            if self.defaults and self.defaults.has_section(section):
                self._config.add_section(section)
                self._config.set(section, option, value)
            else:
                raise

    def settext(self, section, option, value):
        value = str(value)
        # make sure there are no surrogates
        value.encode("utf-8")
        self.set(section, option, value)

    def setbytes(self, section, option, value):
        assert isinstance(value, bytes)
        value = value.decode("utf-8", "surrogateescape")
        self.set(section, option, value)

    def write(self, filename):
        """Write config to filename.

        Can raise EnvironmentError
        """
        assert isinstance(filename, fsnative)

        mkdir(os.path.dirname(filename))

        # temporary set the new version for saving
        if self._version is not None:
            self.add_section("__config__")
            self.set("__config__", "version", self._version)
        try:
            with atomic_save(filename, "wb") as fileobj:
                temp = StringIO()
                self._config.write(temp)
                data = temp.getvalue().encode("utf-8", "surrogateescape")
                fileobj.write(data)
        finally:
            # restore the version the file was originally loaded with
            if self._loaded_version is not None:
                self.set("__config__", "version", self._loaded_version)

    def clear(self):
        """Remove all sections."""
        for section in self._config.sections():
            self._config.remove_section(section)

    def is_empty(self):
        """Whether the config has any sections"""
        return not self._config.sections()

    def read(self, filename):
        """Reads the config from `filename` if the file exists,
        otherwise does nothing

        Can raise EnvironmentError, Error.
        """
        try:
            with open(filename, "rb") as fileobj:
                fileobj = StringIO(
                    fileobj.read().decode("utf-8", "surrogateescape"))
                self._config.readfp(fileobj, filename)
        except (IOError, OSError):
            return

        # don't upgrade if we just created a new config
        if self._version is not None:
            self._loaded_version = self.getint("__config__", "version", -1)
            for func in self._upgrade_funcs:
                self._do_upgrade(func)

    def has_option(self, section, option):
        """If the given section exists, and contains the given option"""
        return self._config.has_option(section, option) or (
            self.defaults and self.defaults.has_option(section, option))

    def has_section(self, section):
        """If the given section exists"""
        return self._config.has_section(section) or (
            self.defaults and self.defaults.has_section(section))

    def remove_option(self, section, option):
        """Remove the specified option from the specified section

        Can raise Error.
        """
        return self._config.remove_option(section, option)

    def add_section(self, section):
        """Add a section named section to the instance if it not
        already exists."""
        if not self._config.has_section(section):
            self._config.add_section(section)
class ParamStore(object):
    """Thread-safe key/value store persisted to an INI file."""

    def __init__(self, root_dir, file_name):
        self._lock = threading.Lock()
        with self._lock:
            if not os.path.isdir(root_dir):
                raise RuntimeError(
                    'Directory "' + root_dir + '" does not exist.')
            self._path = os.path.join(root_dir, file_name)
            self._dirty = False
            # open config file
            self._config = RawConfigParser()
            self._config.read(self._path)

    def flush(self):
        """Write pending changes to disk, if any."""
        if not self._dirty:
            return
        with self._lock:
            self._dirty = False
            with open(self._path, 'w') as handle:
                self._config.write(handle)

    def get(self, section, option, default=None):
        """Get a parameter value and return a string.

        If default is specified and section or option are not defined in the
        file, they are created and set to default, which is then the return
        value.
        """
        with self._lock:
            if self._config.has_option(section, option):
                return self._config.get(section, option)
            if default is not None:
                self._set(section, option, default)
            return default

    def get_datetime(self, section, option, default=None):
        value = self.get(section, option, default)
        if value:
            return WSDateTime.from_csv(value)
        return value

    def set(self, section, option, value):
        """Set option in section to string value."""
        with self._lock:
            self._set(section, option, value)

    def _set(self, section, option, value):
        # Caller must hold self._lock.
        if not self._config.has_section(section):
            self._config.add_section(section)
        elif (self._config.has_option(section, option)
                and self._config.get(section, option) == value):
            # value unchanged — avoid marking the store dirty
            return
        self._config.set(section, option, value)
        self._dirty = True

    def unset(self, section, option):
        """Remove option from section."""
        with self._lock:
            if not self._config.has_section(section):
                return
            if self._config.has_option(section, option):
                self._config.remove_option(section, option)
                self._dirty = True
            if not self._config.options(section):
                # drop the section once its last option is gone
                self._config.remove_section(section)
                self._dirty = True
def has_option(self, option):
    # Convenience override: check the option in this parser's default
    # section via the base-class implementation.
    # NOTE(review): assumes ``_RawConfigParser`` is the aliased configparser
    # base class of this object — confirm at the import site.
    return _RawConfigParser.has_option(self, self._default_section, option)
class APIVersionWriter(TemplateFileWriter):
    """ Provide useful method to write Java files. """

    def __init__(self, monolithe_config, api_info):
        """ Initializes a _JavaSDKAPIVersionFileWriter """
        super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.vro")

        # API identity taken from the api_info mapping.
        self.api_version = api_info["version"]
        self._api_version_string = SDKUtils.get_string_version(self.api_version)
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        # Generator-wide options from the monolithe configuration.
        self.monolithe_config = monolithe_config
        self._output = self.monolithe_config.get_option("output", "transformer")
        self._name = self.monolithe_config.get_option("name", "transformer")
        self._class_prefix = ""
        self._product_accronym = self.monolithe_config.get_option("product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")
        self._url = self.monolithe_config.get_option("url", "transformer")

        # Derive the Java package layout from the product URL.
        self._package_prefix = self._get_package_prefix(self._url)
        self._package_name = self._package_prefix + ".vro." + self._name
        self._package_subdir = self._package_name.replace('.', '/')

        self.output_directory = "%s/vro" % (self._output)
        self.override_folder = os.path.normpath("%s/__overrides" % self.output_directory)
        self.fetchers_path = "/fetchers/"
        self.enums_path = "/enums/"

        # Each .ini below configures one aspect of generation.
        # optionxform = str keeps option names case-sensitive.
        self.attrs_defaults = RawConfigParser()
        path = "%s/vro/__attributes_defaults/attrs_defaults.ini" % self._output
        self.attrs_defaults.optionxform = str
        self.attrs_defaults.read(path)

        self.inventory_entities = RawConfigParser()
        path = "%s/vro/__attributes_defaults/inventory_entities.ini" % self._output
        self.inventory_entities.optionxform = str
        self.inventory_entities.read(path)

        self.workflow_attrs = RawConfigParser()
        path = "%s/vro/__attributes_defaults/workflow_attrs.ini" % self._output
        self.workflow_attrs.optionxform = str
        self.workflow_attrs.read(path)

        self.attrs_types = RawConfigParser()
        path = "%s/vro/__attributes_defaults/attrs_types.ini" % self._output
        self.attrs_types.optionxform = str
        self.attrs_types.read(path)

        plugin_info = RawConfigParser()
        path = "%s/vro/__attributes_defaults/plugin.ini" % self._output
        plugin_info.optionxform = str
        plugin_info.read(path)
        self.plugin_version = plugin_info.get(self.api_version, "pluginVersion")

        workflow_info = RawConfigParser()
        path = "%s/vro/__attributes_defaults/workflow.ini" % self._output
        workflow_info.optionxform = str
        workflow_info.read(path)
        self.workflow_version = workflow_info.get(self.api_version, "workflowVersion")

        # License/copyright header prepended to every generated file.
        with open("%s/vro/__code_header" % self._output, "r") as f:
            self.header_content = f.read()

    def perform(self, specifications):
        """Generate the whole vRO plugin source tree from the specifications."""
        self._resolve_parent_apis(specifications)  # Temporary fix, see method's comment for more info
        self._set_local_and_workflow_type(specifications)  # Temporary until get_type_name is enhanced to include specificiation subtype and local_name
        self._write_file(self.output_directory, "pom.xml.tpl", "pom.xml")
        self._write_o11plugin(specifications)
        self._write_o11plugin_core(specifications)
        self._write_o11plugin_package(specifications)

    def _write_o11plugin(self, specifications):
        """Write the o11nplugin module: pom, license, and entity icons."""
        output_directory = "%s/o11nplugin-%s" % (self.output_directory, self._name.lower())
        self._write_file(output_directory, "o11nplugin/pom.xml.tpl", "pom.xml")

        license_output_directory = "%s/src/main/vmoapp/VSO-INF" % (output_directory)
        os.makedirs(license_output_directory)
        copyfile("%s/LICENSE" % (self.output_directory), "%s/vsoapp.txt" % (license_output_directory));

        icons_output_directory = "%s/src/main/dar/resources/images" % (output_directory)
        os.makedirs(icons_output_directory)
        icons_source_directory = "%s/__icons" % (self.output_directory)
        self._copyfile("icon-plugin.png", icons_source_directory, icons_output_directory)
        self._copyfile("icon-session.png", icons_source_directory, icons_output_directory)
        self._copyfile("icon-folder.png", icons_source_directory, icons_output_directory)
        for rest_name, specification in specifications.items():
            # Per-entity icon is optional; _copyfile skips missing files.
            self._copyfile("icon-%s.png" % (specification.entity_name.lower()),
                           icons_source_directory, icons_output_directory)
        # Source icons are consumed; remove the staging folder afterwards.
        rmtree("%s" % (icons_source_directory))

    def _write_o11plugin_core(self, specifications):
        """Write the -core module: infrastructure classes, then one model,
        fetcher and (where needed) enum per entity, generated in parallel."""
        output_directory = "%s/o11nplugin-%s-core" % (self.output_directory, self._name.lower())
        self._write_file(output_directory, "o11nplugin-core/pom.xml.tpl", "pom.xml")

        source_output_directory = "%s/src/main/java/%s" % (output_directory, self._package_subdir)
        self._write_modulebuilder(source_output_directory, package_name=self._package_name)
        self._write_pluginadaptor(source_output_directory, package_name=self._package_name)
        self._write_pluginfactory(specifications, source_output_directory, package_name=self._package_name)

        model_package_name = self._package_name + ".model"
        model_source_output_directory = "%s/model" % (source_output_directory)
        self._write_constants(specifications, model_source_output_directory, package_name=model_package_name)
        self._write_sessionmanager(model_source_output_directory, package_name=model_package_name)
        self._write_session(specifications, model_source_output_directory, package_name=model_package_name)
        self._write_modelhelper(specifications, model_source_output_directory, package_name=model_package_name)

        task_manager = TaskManager()
        for rest_name, specification in specifications.items():
            task_manager.start_task(method=self._write_model, specification=specification,
                                    specification_set=specifications,
                                    output_directory=model_source_output_directory,
                                    package_name=model_package_name)
            task_manager.start_task(method=self._write_fetcher, specification=specification,
                                    specification_set=specifications,
                                    output_directory=model_source_output_directory,
                                    package_name=model_package_name)
            for attribute in specification.attributes:
                if attribute.type == "enum" or attribute.subtype == "enum":
                    task_manager.start_task(method=self._write_enum, specification=specification,
                                            attribute=attribute,
                                            output_directory=model_source_output_directory,
                                            package_name=model_package_name)
        task_manager.wait_until_exit()

    def _write_o11plugin_package(self, specifications):
        """Write the -package module: session workflows, per-attribute
        getter actions, and add/find/edit/remove workflows per entity."""
        output_directory = "%s/o11nplugin-%s-package" % (self.output_directory, self._name.lower())
        self._write_file(output_directory, "o11nplugin-package/pom.xml.tpl", "pom.xml")
        self._write_file("%s/src/main/resources/META-INF" % (output_directory),
                         "o11nplugin-package/dunes-meta-inf.xml.tpl", "dunes-meta-inf.xml")
        # The keystore is moved (copy then remove) into the package module.
        copyfile("%s/archetype.keystore" % (self.output_directory), "%s/archetype.keystore" % (output_directory));
        remove("%s/archetype.keystore" % (self.output_directory))

        resources_output_directory = "%s/src/main/resources" % (output_directory)
        workflows_output_directory = "%s/Workflow" % (resources_output_directory)
        actions_output_directory = "%s/ScriptModule" % (resources_output_directory)

        # Hand-written Add/Remove Session workflows (no specification backing).
        workflow_package = "Session"
        workflow_directory = "%s/Library/VSPK/Basic/%s" % (workflows_output_directory, workflow_package)
        self._write_workflow_file(specification=None, specification_set=None,
                                  workflow_directory=workflow_directory,
                                  template_file="o11nplugin-package/Add Session.element_info.xml.tpl",
                                  filename="Add Session.element_info.xml", workflow_type="add",
                                  workflow_id=None, attrs_includes=None, attrs_excludes=None,
                                  workflow_name="Add Session", workflow_package=workflow_package,
                                  parent_spec=None)
        self._write_workflow_file(specification=None, specification_set=None,
                                  workflow_directory=workflow_directory,
                                  template_file="o11nplugin-package/Add Session.xml.tpl",
                                  filename="Add Session.xml", workflow_type="add",
                                  workflow_id=None, attrs_includes=None, attrs_excludes=None,
                                  workflow_name = "Add Session", workflow_package=workflow_package,
                                  parent_spec=None)
        self._write_workflow_file(specification=None, specification_set=None,
                                  workflow_directory=workflow_directory,
                                  template_file="o11nplugin-package/Remove Session.element_info.xml.tpl",
                                  filename="Remove Session.element_info.xml", workflow_type="remove",
                                  workflow_id=None, attrs_includes=None, attrs_excludes=None,
                                  workflow_name = "Remove Session", workflow_package=workflow_package,
                                  parent_spec=None)
        self._write_workflow_file(specification=None, specification_set=None,
                                  workflow_directory=workflow_directory,
                                  template_file="o11nplugin-package/Remove Session.xml.tpl",
                                  filename="Remove Session.xml", workflow_type="remove",
                                  workflow_id=None, attrs_includes=None, attrs_excludes=None,
                                  workflow_name = "Remove Session", workflow_package=workflow_package,
                                  parent_spec=None)

        # Getter actions for enum/list attributes that pass the include/exclude filters.
        for rest_name, specification in specifications.items():
            for attribute in specification.attributes:
                attrs_includes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "includes")
                attrs_excludes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "excludes")
                if (attribute.required or attribute.local_name in attrs_includes) and (not attribute.local_name in attrs_excludes):
                    if attribute.type == "enum" or attribute.type == "list":
                        self._write_action_files(specification=specification, attribute=attribute,
                                                 package_name=self._package_name,
                                                 output_directory=actions_output_directory)

        # CRUD workflows for every non-root entity, per parent API.
        for rest_name, specification in specifications.items():
            if not specification.is_root:
                attrs_includes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "includes")
                attrs_excludes = self._get_entity_list_filter(self.workflow_attrs, specification.entity_name, "excludes")
                for parent_api in specification.parent_apis:
                    workflow_package = "Other" if specification.package is None else specification.package.capitalize()
                    if parent_api.rest_name in specifications:
                        parent_spec = specifications[parent_api.rest_name]
                        if parent_spec:
                            entity_excludes = self._get_entity_list_filter(self.inventory_entities, parent_spec.entity_name, "excludes")
                            if specification.entity_name not in entity_excludes:
                                if parent_api.allows_create:
                                    self._write_workflow_files(specification=specification,
                                                               specification_set=specifications,
                                                               output_directory=workflows_output_directory,
                                                               workflow_type="add",
                                                               attrs_includes=attrs_includes,
                                                               attrs_excludes=attrs_excludes,
                                                               workflow_name="Add %s to %s" % (specification.entity_name, parent_spec.entity_name),
                                                               workflow_package=workflow_package,
                                                               parent_spec=parent_spec)
                                if parent_api.allows_create or parent_spec.is_root:
                                    self._write_workflow_files(specification=specification,
                                                               specification_set=specifications,
                                                               output_directory=workflows_output_directory,
                                                               workflow_type="find",
                                                               attrs_includes=attrs_includes,
                                                               attrs_excludes=attrs_excludes,
                                                               workflow_name="Find %s in %s" % (specification.entity_name, parent_spec.entity_name),
                                                               workflow_package=workflow_package,
                                                               parent_spec=parent_spec)
                                # NOTE(review): edit/remove are (re)written once per parent API;
                                # the file name is parent-independent, so later parents overwrite
                                # earlier, identical output — confirm this is intentional.
                                self._write_workflow_files(specification=specification,
                                                           specification_set=specifications,
                                                           output_directory=workflows_output_directory,
                                                           workflow_type="edit",
                                                           attrs_includes=attrs_includes,
                                                           attrs_excludes=attrs_excludes,
                                                           workflow_name="Edit %s" % (specification.entity_name),
                                                           workflow_package=workflow_package)
                                self._write_workflow_files(specification=specification,
                                                           specification_set=specifications,
                                                           output_directory=workflows_output_directory,
                                                           workflow_type="remove",
                                                           attrs_includes=attrs_includes,
                                                           attrs_excludes=attrs_excludes,
                                                           workflow_name="Remove %s" % (specification.entity_name),
                                                           workflow_package=workflow_package)

    def _write_session(self, specifications, output_directory, package_name):
        """Write the Session class for the core module."""
        template_file = "o11nplugin-core/session.java.tpl"
        base_name = "Session"
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root, name=self._name,
                   api_prefix=self.api_prefix, override_content=override_content,
                   header=self.header_content, version_string=self._api_version_string,
                   package_name=package_name, specifications=list(specifications.values()),
                   root_entity=specifications[self.api_root])

    def _write_model(self, specification, specification_set, output_directory, package_name):
        """ Write autogenerate specification file """
        template_file = "o11nplugin-core/model.java.tpl"
        filename = "%s%s.java" % (self._class_prefix, specification.entity_name)
        override_content = self._extract_override_content(specification.entity_name)
        # The API root entity gets a different Java superclass.
        superclass_name = "BaseRootObject" if specification.rest_name == self.api_root else "BaseObject"

        # Attribute default values configured in attrs_defaults.ini.
        defaults = {}
        section = specification.entity_name
        if self.attrs_defaults.has_section(section):
            for attribute in self.attrs_defaults.options(section):
                defaults[attribute] = self.attrs_defaults.get(section, attribute)

        entity_includes = self._get_entity_list_filter(self.inventory_entities, section, "includes")
        entity_excludes = self._get_entity_list_filter(self.inventory_entities, section, "excludes")
        # Attribute used as the entity's display name in the vRO inventory.
        entity_name_attr = "id"
        if self.inventory_entities.has_section(section):
            if self.inventory_entities.has_option(section, "name"):
                entity_name_attr = self.inventory_entities.get(section, "name")

        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   specification=specification, specification_set=specification_set,
                   version=self.api_version, name=self._name, class_prefix=self._class_prefix,
                   product_accronym=self._product_accronym, override_content=override_content,
                   superclass_name=superclass_name, header=self.header_content,
                   version_string=self._api_version_string, package_name=package_name,
                   attribute_defaults=defaults, entity_name_attr=entity_name_attr,
                   root_api=self.api_root, entity_includes=entity_includes,
                   entity_excludes=entity_excludes)
        return (filename, specification.entity_name)

    def _write_fetcher(self, specification, specification_set, output_directory, package_name):
        """ Write fetcher """
        template_file = "o11nplugin-core/fetcher.java.tpl"
        destination = "%s%s" % (output_directory, self.fetchers_path)
        base_name = "%sFetcher" % specification.entity_name_plural
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=destination, filename=filename, template_name=template_file,
                   specification=specification, specification_set=specification_set,
                   class_prefix=self._class_prefix, product_accronym=self._product_accronym,
                   override_content=override_content, header=self.header_content,
                   name=self._name, version_string=self._api_version_string,
                   package_name=package_name)
        return (filename, specification.entity_name_plural)

    def _write_modulebuilder(self, output_directory, package_name):
        """Write the ModuleBuilder class for the core module."""
        template_file = "o11nplugin-core/modulebuilder.java.tpl"
        base_name = "ModuleBuilder"
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root, name=self._name,
                   api_prefix=self.api_prefix, override_content=override_content,
                   header=self.header_content, version_string=self._api_version_string,
                   package_name=package_name)

    def _write_pluginadaptor(self, output_directory, package_name):
        """Write the PluginAdaptor class for the core module."""
        template_file = "o11nplugin-core/pluginadaptor.java.tpl"
        base_name = "PluginAdaptor"
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root, name=self._name,
                   api_prefix=self.api_prefix, override_content=override_content,
                   header=self.header_content, version_string=self._api_version_string,
                   package_name=package_name)

    def _write_pluginfactory(self, specifications, output_directory, package_name):
        """Write the PluginFactory class for the core module."""
        template_file = "o11nplugin-core/pluginfactory.java.tpl"
        base_name = "PluginFactory"
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root, name=self._name,
                   api_prefix=self.api_prefix, override_content=override_content,
                   header=self.header_content, version_string=self._api_version_string,
                   package_name=package_name, specification_set=specifications,
                   specifications=list(specifications.values()))

    def _write_constants(self, specifications, output_directory, package_name):
        """Write the Constants class for the model package."""
        template_file = "o11nplugin-core/constants.java.tpl"
        base_name = "Constants"
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root, name=self._name,
                   api_prefix=self.api_prefix, override_content=override_content,
                   header=self.header_content, version_string=self._api_version_string,
                   product_name=self._product_name, package_name=package_name,
                   specification_set=specifications,
                   specifications=list(specifications.values()))

    def _write_sessionmanager(self, output_directory, package_name):
        """Write the SessionManager class for the model package."""
        template_file = "o11nplugin-core/sessionmanager.java.tpl"
        base_name = "SessionManager"
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root, name=self._name,
                   api_prefix=self.api_prefix, override_content=override_content,
                   header=self.header_content, version_string=self._api_version_string,
                   package_name=package_name)

    def _write_modelhelper(self, specifications, output_directory, package_name):
        """Write the ModelHelper class for the model package."""
        template_file = "o11nplugin-core/modelhelper.java.tpl"
        base_name = "ModelHelper"
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root, name=self._name,
                   api_prefix=self.api_prefix, override_content=override_content,
                   header=self.header_content, version_string=self._api_version_string,
                   product_name=self._product_name, package_name=package_name,
                   specification_set=specifications,
                   specifications=list(specifications.values()))

    def _write_action_files(self, specification, attribute, package_name, output_directory):
        """Write the pair of files for one attribute-getter vRO action."""
        # NOTE(review): on Python 3 this concatenates bytes (.encode('ascii'))
        # with str, which raises TypeError, and uuid.uuid5 requires str —
        # this path appears to be Python-2-only code; confirm before running
        # under Python 3.
        action_unique_name = "action-" + specification.entity_name.encode('ascii') + '-get-' + attribute.local_name.encode('ascii')
        action_id = uuid.uuid5(uuid.NAMESPACE_OID, action_unique_name)
        action_directory = "%s/%s" % (output_directory, self._package_subdir)
        if not os.path.exists(action_directory):
            makedirs(action_directory)
        action_name = "get%s%s" %(specification.entity_name, attribute.local_name[0:1].upper() + attribute.local_name[1:])
        self._write_action_file(specification=specification, attribute=attribute,
                                action_directory=action_directory,
                                template_file="o11nplugin-package/get_entity_attribute_action.element_info.xml.tpl",
                                filename="%s.element_info.xml" % (action_name),
                                action_name=action_name, action_id=action_id)
        self._write_action_file(specification=specification, attribute=attribute,
                                action_directory=action_directory,
                                template_file="o11nplugin-package/get_entity_attribute_action.xml.tpl",
                                filename="%s.xml" % (action_name),
                                action_name=action_name, action_id=action_id)

    def _write_action_file(self, specification, attribute, action_directory, template_file, filename, action_name, action_id):
        """Render a single action template into action_directory."""
        self.write(destination=action_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root,
                   api_prefix=self.api_prefix, product_name=self._product_name,
                   name=self._name, header=self.header_content,
                   version_string=self._api_version_string,
                   package_prefix=self._package_prefix, package_name=self._package_name,
                   specification=specification, attribute=attribute,
                   action_name = action_name, action_id=action_id,
                   workflow_version=self.workflow_version)

    def _write_workflow_files(self, specification, specification_set, output_directory, workflow_type, attrs_includes, attrs_excludes, workflow_name, workflow_package, parent_spec = None):
        """Write the .element_info.xml/.xml pair for one entity workflow."""
        # NOTE(review): same Python-3 bytes+str concatenation concern as in
        # _write_action_files; uuid.uuid5 also needs a str name on Python 3.
        workflow_unique_name = specification.entity_name.encode('ascii') + '-' + workflow_type + ('-' + parent_spec.entity_name.encode('ascii') if parent_spec else "")
        workflow_id = uuid.uuid5(uuid.NAMESPACE_OID, workflow_unique_name)
        workflow_directory = "%s/Library/VSPK/Basic/%s" % (output_directory, workflow_package)
        if not os.path.exists(workflow_directory):
            makedirs(workflow_directory)
        self._write_workflow_file(specification=specification, specification_set=specification_set,
                                  workflow_directory=workflow_directory,
                                  template_file="o11nplugin-package/%s_workflow.element_info.xml.tpl" % (workflow_type),
                                  filename="%s.element_info.xml" % (workflow_name),
                                  workflow_type=workflow_type, workflow_id=workflow_id,
                                  attrs_includes=attrs_includes, attrs_excludes=attrs_excludes,
                                  workflow_name=workflow_name, workflow_package=workflow_package,
                                  parent_spec=parent_spec)
        self._write_workflow_file(specification=specification, specification_set=specification_set,
                                  workflow_directory=workflow_directory,
                                  template_file="o11nplugin-package/%s_workflow.xml.tpl" % (workflow_type),
                                  filename="%s.xml" % (workflow_name),
                                  workflow_type=workflow_type, workflow_id=workflow_id,
                                  attrs_includes=attrs_includes, attrs_excludes=attrs_excludes,
                                  workflow_name=workflow_name, workflow_package=workflow_package,
                                  parent_spec=parent_spec)

    def _write_workflow_file(self, specification, specification_set, workflow_directory, template_file, filename, workflow_type, workflow_id, attrs_includes, attrs_excludes, workflow_name, workflow_package, parent_spec):
        """Render a single workflow template into workflow_directory."""
        self.write(destination=workflow_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root,
                   api_prefix=self.api_prefix, product_name=self._product_name,
                   name=self._name, header=self.header_content,
                   version_string=self._api_version_string,
                   package_prefix=self._package_prefix, package_name=self._package_name,
                   specification=specification, specification_set=specification_set,
                   workflow_type=workflow_type, workflow_id=workflow_id,
                   attrs_includes=attrs_includes, attrs_excludes=attrs_excludes,
                   workflow_name=workflow_name, parent_spec=parent_spec,
                   workflow_version=self.workflow_version,
                   workflow_package=workflow_package)

    def _write_enum(self, specification, attribute, output_directory, package_name):
        """ Write autogenerate specification file """
        enum_name = specification.entity_name + attribute.local_name[0:1].upper() + attribute.local_name[1:]
        template_file = "o11nplugin-core/enum.java.tpl"
        destination = "%s%s" % (output_directory, self.enums_path)
        filename = "%s%s.java" % (self._class_prefix, enum_name)
        self.write(destination=destination, filename=filename, template_name=template_file,
                   header=self.header_content, specification=specification,
                   package_name=package_name, enum_name=enum_name, attribute=attribute)
        return (filename, specification.entity_name)

    def _write_file(self, output_directory, template_file, filename):
        """Render one standalone (non-entity) template into output_directory."""
        self.write(destination=output_directory, filename=filename, template_name=template_file,
                   version=self.api_version, product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix, root_api=self.api_root,
                   api_prefix=self.api_prefix, product_name=self._product_name,
                   name=self._name, header=self.header_content,
                   version_string=self._api_version_string,
                   package_prefix=self._package_prefix, package_name=self._package_name,
                   plugin_version=self.plugin_version)

    def _extract_override_content(self, name):
        """Return the user-supplied Java override for *name*, or None."""
        # find override file: a version-specific override wins over a generic one
        specific_override_path = "%s/%s_%s%s.override.java" % (self.override_folder, self.api_version, self._class_prefix, name.title())
        generic_override_path = "%s/%s%s.override.java" % (self.override_folder, self._class_prefix, name.title())
        final_path = specific_override_path if os.path.exists(specific_override_path) else generic_override_path

        # Read override from file
        override_content = None
        if os.path.isfile(final_path):
            override_content = open(final_path).read()
        return override_content

    def _get_package_prefix(self, url):
        """Build a reversed-hostname Java package prefix from *url*."""
        hostname_parts = self._get_hostname_parts(url)
        package_name = ""
        for index, hostname_part in enumerate(reversed(hostname_parts)):
            package_name = package_name + hostname_part
            if index < len(hostname_parts) - 1:
                package_name = package_name + '.'
        return package_name

    def _get_hostname_parts(self, url):
        """Return the hostname components of *url*, dropping any 'www'."""
        # urlparse needs a scheme to recognize the hostname.
        if url.find("http://") != 0:
            url = "http://" + url
        hostname = urlparse(url).hostname
        hostname_parts = hostname.split('.')
        valid_hostname_parts = []
        for hostname_part in hostname_parts:
            if hostname_part != "www":
                valid_hostname_parts.append(hostname_part)
        return valid_hostname_parts

    # Custom version of this method until the main one gets fixed
    def _resolve_parent_apis(self, specifications):
        """Rebuild every specification's parent_apis list by scanning all
        other specifications' child_apis for references back to it."""
        for specification_rest_name, specification in specifications.items():
            specification.parent_apis[:] = []
            for rest_name, remote_spec in specifications.items():
                for related_child_api in remote_spec.child_apis:
                    if related_child_api.rest_name == specification.rest_name:
                        parent_api = SpecificationAPI(specification=remote_spec)
                        parent_api.rest_name = remote_spec.rest_name
                        if related_child_api.allows_get:
                            parent_api.allows_get = True
                        if related_child_api.allows_create:
                            parent_api.allows_create = True
                        if related_child_api.allows_update:
                            parent_api.allows_update = True
                        if related_child_api.allows_delete:
                            # NOTE(review): 'allows_Delete' (capital D) looks
                            # like a typo for 'allows_delete' — nothing in this
                            # file reads allows_Delete back; confirm and fix.
                            parent_api.allows_Delete = True
                        specification.parent_apis.append(parent_api)

    def _set_local_and_workflow_type(self, specifications):
        """Annotate every attribute with its Java local_type and vRO
        workflow_type, using attrs_types.ini overrides for object types."""
        for rest_name, specification in specifications.items():
            for attribute in specification.attributes:
                if attribute.type == "string":
                    attribute.workflow_type = "string"
                elif attribute.type == "integer":
                    attribute.workflow_type = "number"
                elif attribute.type == "boolean":
                    attribute.workflow_type = "boolean"
                elif attribute.type == "time":
                    attribute.workflow_type = "number"
                elif attribute.type == "float":
                    attribute.workflow_type = "number"
                elif attribute.type == "enum":
                    enum_type = specification.entity_name + attribute.local_name[0:1].upper() + attribute.local_name[1:]
                    attribute.local_type = enum_type
                    attribute.workflow_type = self._name.upper() + ':' + enum_type
                elif attribute.type == "object":
                    # Default Java type unless attrs_types.ini overrides it.
                    attr_type = "Object"
                    if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
                        type = self.attrs_types.get(specification.entity_name, attribute.local_name)
                        if type:
                            attr_type = type
                    attribute.local_type = attr_type
                    attribute.workflow_type = self._name.upper() + ':' + attr_type
                elif attribute.type == "list":
                    if attribute.subtype == "enum":
                        enum_subtype = specification.entity_name + attribute.local_name[0:1].upper() + attribute.local_name[1:]
                        attribute.local_type = "java.util.List<" + enum_subtype + ">"
                        attribute.workflow_type = "Array/" + self._name.upper() + ':' + enum_subtype
                    elif attribute.subtype == "object":
                        attr_subtype = "com.fasterxml.jackson.databind.JsonNode"
                        if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
                            subtype = self.attrs_types.get(specification.entity_name, attribute.local_name)
                            if subtype:
                                attr_subtype = subtype
                        attribute.local_type = "java.util.List<" + attr_subtype + ">"
                        attribute.workflow_type = "Array/" + self._name.upper() + ':' + attr_subtype
                    elif attribute.subtype == "entity":
                        attribute.local_type = "java.util.List<com.fasterxml.jackson.databind.JsonNode>"
                        attribute.workflow_type = "Array/string"
                    else:
                        attribute.local_type = "java.util.List<String>"
                        attribute.workflow_type = "Array/string"

    def _copyfile(self, filename, input_directory, output_directory):
        """Copy *filename* between directories; silently skip missing files."""
        input_file = "%s/%s" % (input_directory, filename)
        if os.path.isfile(input_file):
            output_file = "%s/%s" % (output_directory, filename)
            copyfile(input_file, output_file)

    def _get_entity_list_filter(self, collection, section, tag):
        """Collect the comma-separated *tag* entries from the special
        "all" section plus the entity-specific *section* of *collection*."""
        entities = []
        if collection.has_option("all", tag):
            entity_list_str = collection.get("all", tag)
            entities = entities + entity_list_str.split(", ")
        if collection.has_option(section, tag):
            entity_list_str = collection.get(section, tag)
            entities = entities + entity_list_str.split(", ")
        return entities
def __init__(self, command_line_options):
    """Resolve the backup settings from three sources, in priority order:
    command-line options, the .gib.conf file found in the directory being
    backed up, and built-in defaults.

    On any unusable configuration this exits the process with an Errors
    code (via sys.exit) instead of raising.
    """
    self.configuration_file = '.gib.conf'
    self.directory_to_backup = None
    self.directory_to_backup_from = None
    self.git_directory = None
    self.git_directory_from = None
    self.branch = None
    self.branch_from = None
    # Directory to back up: command line, else $HOME.
    if command_line_options.directory:
        self.directory_to_backup = command_line_options.directory
        self.directory_to_backup_from = OptionFrom.COMMAND_LINE
    else:
        if 'HOME' not in os.environ:
            # Then we can't use HOME as default directory:
            print_stderr("The HOME environment variable was not set")
            sys.exit(Errors.STRANGE_ENVIRONMENT)
        self.directory_to_backup = os.environ['HOME']
        self.directory_to_backup_from = OptionFrom.DEFAULT_VALUE
    # We need to make sure that this is an absolute path before
    # changing directory:
    self.directory_to_backup = os.path.abspath(self.directory_to_backup)
    if not exists_and_is_directory(self.directory_to_backup):
        sys.exit(Errors.DIRECTORY_TO_BACKUP_MISSING)
    # Now we know the directory that we're backing up, try to load the
    # config file (RawConfigParser.read ignores a missing file):
    configuration = RawConfigParser()
    configuration.read(os.path.join(self.directory_to_backup,
                                    self.configuration_file))
    # Now set the git directory:
    if command_line_options.git_directory:
        self.git_directory = command_line_options.git_directory
        self.git_directory_from = OptionFrom.COMMAND_LINE
    elif configuration.has_option('repository', 'git_directory'):
        self.git_directory = configuration.get(
            'repository', 'git_directory')
        self.git_directory_from = OptionFrom.CONFIGURATION_FILE
    else:
        self.git_directory = os.path.join(self.directory_to_backup, '.git')
        self.git_directory_from = OptionFrom.DEFAULT_VALUE
    if not os.path.isabs(self.git_directory):
        print_stderr("The git directory must be an absolute path.")
        sys.exit(Errors.GIT_DIRECTORY_RELATIVE)
    # And finally the branch:
    if command_line_options.branch:
        self.branch = command_line_options.branch
        self.branch_from = OptionFrom.COMMAND_LINE
    elif configuration.has_option('repository', 'branch'):
        self.branch = configuration.get('repository', 'branch')
        self.branch_from = OptionFrom.CONFIGURATION_FILE
    else:
        self.branch = 'master'
        self.branch_from = OptionFrom.DEFAULT_VALUE
    # Check that the git_directory ends in '.git':
    # BUG FIX: the pattern must be a raw string — '\.' in a plain string
    # literal is an invalid escape sequence (SyntaxWarning today, an error
    # in future Python versions).
    if not re.search(r'\.git/*$', self.git_directory):
        message = "The git directory ({}) did not end in '.git'"
        print_stderr(message.format(self.git_directory))
        sys.exit(Errors.BAD_GIT_DIRECTORY)
    # Also check that it actually exists:
    if not os.path.exists(self.git_directory):
        message = "The git directory '{}' does not exist."
        print_stderr(message.format(self.git_directory))
        sys.exit(Errors.GIT_DIRECTORY_MISSING)