def load_rcfile(self):
    """Apply rcfile values to attributes of self.

    Reads the [global] section and, when NAME is set, the [<NAME>]
    section.  A value is applied only for known options whose current
    attribute still equals the shipped default, so explicit overrides
    are never clobbered.
    """
    if not os.path.exists(self.RCFILE):
        return
    config = RawConfigParser()
    # Option names are upper-cased to match the settings attribute names.
    config.optionxform = lambda x: x.upper()
    config.read(self.RCFILE)
    entries = []
    if config.has_section('global'):
        entries.extend(config.items('global'))
    if self.NAME is not None and config.has_section(self.NAME):
        entries.extend(config.items(self.NAME))
    # One-argument converters; booleans need their own word table below.
    converters = {
        'str': lambda raw: raw,
        'int': int,
        'float': float,
        'list': lambda raw: [part.strip() for part in raw.split(",")],
    }
    for key, raw in entries:
        if key not in CONFIG_TYPES or getattr(self, key) != DEFAULT_SETTINGS[key]:
            continue
        kind = CONFIG_TYPES[key]
        if kind == 'bool':
            word = raw.lower()
            if word in ('1', 'yes', 'true', 'on'):
                setattr(self, key, True)
            elif word in ('0', 'no', 'false', 'off'):
                setattr(self, key, False)
            else:
                raise ValueError("Not a boolean: %s" % raw)
        elif kind in converters:
            setattr(self, key, converters[kind](raw))
def load_rcfile(self):
    """Parse CLI args, locate the rcfile, and return its parsed values.

    Returns a dict of parsed rc values, or {} when no rcfile exists.
    Raises RuntimeError when the rcfile contains unparseable values.
    """
    # Parse command-line arguments first so self.RCFILE reflects any
    # explicit override before we consider the legacy location.
    self.process_args()
    # Fall back to OLD_RCFILE only when the user kept the default
    # location and that default does not exist yet.
    # NOTE(review): `parser` is presumably a module-level argparse
    # parser -- confirm.
    if self.RCFILE == parser.get_default('RCFILE') and \
       not os.path.exists(self.RCFILE) and os.path.exists(OLD_RCFILE):
        logger.warning("Using old rcfile found at %s, "
                       "please rename to %s.", OLD_RCFILE, self.RCFILE)
        self.RCFILE = OLD_RCFILE
    if os.path.exists(self.RCFILE):
        config = RawConfigParser()
        # Upper-case option names to match settings attribute names.
        config.optionxform = lambda x: x.upper()
        config.read(self.RCFILE)
        items = []
        if config.has_section('global'):
            items.extend(config.items('global'))
        if self.NAME is not None and config.has_section(self.NAME):
            items.extend(config.items(self.NAME))
        try:
            return self.parse_rcvalues(items)
        except (ValueError, argparse.ArgumentTypeError) as e:
            raise RuntimeError("Unable to parse RC values: %s" % e)
    return {}
def write_config(self, survey_dict=None, mosaic_dict=None, constants_dict=None,
                 spectral_dict=None, spatial_dict=None):
    """
    Writes all of the needed information to the config file called
    <mosaicname>.cfg

    The survey section is always (re)written; the remaining sections are
    written only when their dict is non-empty.
    """
    if not os.path.isdir(SURVEY_CONFIG_DIR):
        os.makedirs(SURVEY_CONFIG_DIR)
    configfilename = survey_dict['survey'] + '_' + mosaic_dict['mosaic']
    config = RawConfigParser()
    # NOTE(review): this reads <name>.cfg from the current directory but
    # writes to SURVEY_CONFIG_DIR below -- confirm the asymmetry is
    # intentional.
    config.read(configfilename + '.cfg')
    if not config.has_section('survey'):
        config.add_section('survey')
    for variable, value in survey_dict.items():
        config.set('survey', variable, value)
    self.logger.info('wrote common config to ' + configfilename + '.cfg.')
    # The four optional sections all follow the identical
    # overwrite-or-create pattern, so they share one helper.
    self._write_config_section(config, configfilename, 'mosaic', mosaic_dict)
    self._write_config_section(config, configfilename, 'constants', constants_dict)
    self._write_config_section(config, configfilename, 'spectralSearch', spectral_dict)
    self._write_config_section(config, configfilename, 'spatialSearch', spatial_dict)
    with open(SURVEY_CONFIG_DIR + configfilename + '.cfg', 'w') as configfile:
        config.write(configfile)

def _write_config_section(self, config, configfilename, section, values):
    """Copy `values` into `section` of `config`, creating it if needed."""
    if not values:
        return
    if config.has_section(section):
        self.logger.info(section + " config exists, overwriting...")
    else:
        config.add_section(section)
    for variable, value in values.items():
        config.set(section, variable, value)
    self.logger.info("wrote " + section + " config to " + configfilename + ".cfg.")
def load_rcfile(self):
    """Apply rcfile settings, then recompute any implied options."""
    if os.path.exists(self.RCFILE):
        cfg = RawConfigParser()
        # Option names are normalised to upper case to match attributes.
        cfg.optionxform = lambda name: name.upper()
        cfg.read(self.RCFILE)
        collected = []
        # [global] always applies; the per-program [<NAME>] section only
        # when NAME is set.
        for section in ('global', self.NAME):
            if section is not None and cfg.has_section(section):
                collected.extend(cfg.items(section))
        self.load_rcvalues(collected)
        self.update_implications()
class Options:
    """XDG-backed option store: reads layered config files, persists to one."""

    def __init__(self, name, defaults):
        filename = '%s.cfg' % name
        search = [os.path.join(d, filename) for d in load_config_paths(name)]
        search.reverse()  # lowest priority first, so later reads win
        self.save_path = os.path.join(save_config_path(name), filename)
        self.config = RawConfigParser(defaults)
        self.paths = self.config.read(search)
        if self.config.has_section(name):
            self.section = name
        else:
            self.section = 'DEFAULT'

    def get(self, option):
        """Return the option's value, or None when it is not set."""
        try:
            return self.config.get(self.section, option)
        except NoOptionError:
            return None

    def set(self, option, value):
        """Set (or, for None, remove) the option and persist to disk."""
        if value is None:
            self.config.remove_option(self.section, option)
        else:
            self.config.set(self.section, option, value)
        # Temporarily drop the defaults so only explicit values are
        # written out, then restore them.
        saved_defaults = self.config._defaults
        self.config._defaults = None
        with open(self.save_path, 'w') as fh:
            self.config.write(fh)
        self.config._defaults = saved_defaults
def __init__(self, config_path=None):
    """Load bot settings from an ini file (defaults over regexbot.ini)."""
    if config_path is None:
        config_path = 'regexbot.ini'
    config = RawConfigParser()
    # Seed with defaults, then let the user's file override them.
    config.read_dict(DEFAULT_CONFIG)
    config.read(config_path)
    self.rtm_token = config.get('regexbot', 'rtm_token')
    # Flood-control windows are stored as timedeltas for direct
    # comparison against datetime arithmetic.
    self.channel_flood_cooldown = timedelta(seconds=config.getint('regexbot', 'channel_flood_cooldown'))
    self.global_flood_cooldown = timedelta(seconds=config.getint('regexbot', 'global_flood_cooldown'))
    self.max_messages = config.getint('regexbot', 'max_messages')
    self.max_message_size = config.getint('regexbot', 'max_message_size')
    # Version string gets the current git branch appended when available.
    self.version = str(config.get('regexbot', 'version')) + '; %s'
    try:
        self.version = self.version % Popen(["git", "branch", "-v", "--contains"], stdout=PIPE).communicate()[0].strip()
    except:
        # Best effort only -- git may be missing entirely.
        self.version = self.version % 'unknown'
    self._last_message_times = {}
    self._last_message = datetime.utcnow()
    self._message_buffer = {}
    self.ignore_list = []
    # Each value in [ignore] is compiled as a case-insensitive pattern;
    # a bad pattern aborts startup with a diagnostic.
    # NOTE(review): Python 2 syntax (`except X, ex` and print statements).
    if config.has_section('ignore'):
        for k, v in config.items('ignore'):
            try:
                self.ignore_list.append(regex.compile(str(v), regex.I))
            except Exception, ex:
                print "Error compiling regular expression in ignore list (%s):" % k
                print " %s" % v
                print ex
                exit(1)
def _read_from_sections(user, collection_url, permission):
    """Return True when `permission` is granted to `user` on
    `collection_url` by any matching rights section.

    Sections come from INITIAL_RIGHTS overlaid with the Django setting
    DJRADICALE_RIGHTS; each section's 'user' and 'collection' values are
    regular expressions.
    """
    # NOTE(review): `regex` here is a ConfigParser instance, not a
    # compiled pattern -- the name is historical.
    regex = ConfigParser({'login': user, 'path': collection_url})
    for rights in (INITIAL_RIGHTS, settings.DJRADICALE_RIGHTS):
        for section, values in rights.items():
            if not regex.has_section(section):
                regex.add_section(section)
            for key, value in values.items():
                # %-interpolate the escaped login/path so rules can
                # anchor on the literal user/collection.
                regex.set(
                    section, key, value % {
                        'login': re.escape(user),
                        'path': re.escape(collection_url),
                    })
    log.LOGGER.debug("Rights type '%s'" % __name__)
    for section in regex.sections():
        re_user = regex.get(section, 'user')
        re_collection = regex.get(section, 'collection')
        log.LOGGER.debug(
            "Test if '%s:%s' matches against '%s:%s' from section '%s'" % (
                user, collection_url, re_user, re_collection, section))
        user_match = re.match(re_user, user)
        if user_match:
            # Groups captured by the user pattern may be referenced by
            # the collection pattern via str.format placeholders.
            re_collection = re_collection.format(*user_match.groups())
            if re.match(re_collection, collection_url):
                log.LOGGER.debug("Section '%s' matches" % section)
                # `permission` is tested by substring, e.g. 'r' in 'rw'.
                if permission in regex.get(section, 'permission'):
                    return True
        else:
            log.LOGGER.debug("Section '%s' does not match" % section)
    return False
def parse_and_append(self, filename):
    """Parse a .desktop file and append it to self.apps when it is a
    media-player application handling one of our mime types.

    Any parsing problem silently skips the file.
    """
    try:
        parser = RawConfigParser()
        parser.read([filename])
        # NOTE(review): `sect` is not defined in this function --
        # presumably a module-level constant naming the desktop-entry
        # section; confirm.
        if not parser.has_section(sect):
            return
        app_categories = parser.get(sect, 'Categories')
        if not app_categories:
            return
        # Only keep applications tagged with a player category.
        if not any(category in self.PLAYER_CATEGORIES for category in app_categories.split(';')):
            return
        # Find out if we need it by comparing mime types
        app_mime = parser.get(sect, 'MimeType')
        for needed_type in self.mimetypes:
            if app_mime.find(needed_type + '/') != -1:
                app_name = parser.get(sect, 'Name')
                app_cmd = parser.get(sect, 'Exec')
                app_icon = parser.get(sect, 'Icon')
                # Insert a separator before the first matching app.
                if not self.__has_sep:
                    self.add_separator()
                self.apps.append(UserApplication(app_name, app_cmd, app_mime, app_icon))
                return
    except:
        # Deliberate catch-all: malformed desktop entries are ignored.
        return
def _has_required_metadata(setup_cfg): config = RawConfigParser() config.read([setup_cfg], encoding="utf8") return ( config.has_section("metadata") and "name" in config.options("metadata") and "version" in config.options("metadata") )
def read_config(self, configfilename):
    """
    Returns all of the needed information from the config file
    called <name>.cfg.  Also checks to make sure all of the config
    parameters are set based on the configure dictionaries given in
    the configDictionaryList.

    Returns a dict keyed 'survey'/'mosaic'/'constants'/'spectral'/
    'spatial', each mapping to that section's items (empty when the
    section is absent).  Raises FileNotFound when the file is missing.
    """
    survey_dict = {}
    mosaic_dict = {}
    utils_dict = {}
    spectral_dict = {}
    spatial_dict = {}
    try:
        self.check_for_files([SURVEY_CONFIG_DIR + configfilename + ".cfg"])
        self.logger.info('Reading from config file (' + configfilename + '.cfg)')
        config = RawConfigParser()
        config.read(SURVEY_CONFIG_DIR + configfilename + '.cfg')
        if config.has_section('survey'):
            survey_dict = dict(config.items('survey'))
        if config.has_section('mosaic'):
            mosaic_dict = dict(config.items('mosaic'))
        if config.has_section('constants'):
            utils_dict = dict(config.items('constants'))
        if config.has_section('spectralSearch'):
            spectral_dict = dict(config.items('spectralSearch'))
        if config.has_section('spatialSearch'):
            spatial_dict = dict(config.items('spatialSearch'))
        return {
            'survey': survey_dict,
            'mosaic': mosaic_dict,
            'constants': utils_dict,
            'spectral': spectral_dict,
            'spatial': spatial_dict,
        }
    except FileNotFound:
        # Bug fix: re-raise the caught exception instance.  The old
        # `raise FileNotFound` raised the bare class, discarding the
        # original message and traceback.
        raise
def load_settings(config_file):
    """Return the settings dict from `config_file`.

    Prefers the [app:main_helper] section, falling back to [app:main].
    Raises Exception when the file cannot be read as an ini file.
    """
    parser = RawConfigParser()
    if not parser.read(config_file):
        raise Exception('Not a valid config file: {0!r}'.format(config_file))
    section = 'app:main_helper' if parser.has_section('app:main_helper') else 'app:main'
    return dict(parser.items(section))
def download_vols(volumeIDs, output, username=None, password=None):
    """Download HathiTrust volumes by ID into `output`.

    Credentials are taken from the arguments, else from ~/.htrc, else
    prompted for interactively (optionally saved back to ~/.htrc).
    Exits with status 1 when no OAuth token can be obtained.
    """
    # create output folder, if nonexistant
    if not os.path.isdir(output):
        os.makedirs(output)
    if not username and not password:
        path = os.path.expanduser('~')
        path = os.path.join(path, '.htrc')
        config = ConfigParser(allow_no_value=True)
        if os.path.exists(path):
            config.read(path)
            if config.has_section('main'):
                username = config.get("main", "username")
                password = config.get("main", "password")
        # If config file is blank, still prompt!
        if not username and not password:
            print("Please enter your HathiTrust credentials.")
            username = input("Token: ")
            # NOTE(review): the next line is corrupted (it looks like a
            # credential-scrubbing artifact) and is not valid Python; it
            # presumably read the password and then asked whether to
            # save -- restore from upstream history before shipping.
            password = input("Password: "******"Save credentials?", default=True)
            if save:
                with open(path, 'w') as credential_file:
                    if not config.has_section('main'):
                        config.add_section('main')
                    config.set('main', 'username', username)
                    config.set('main', 'password', password)
                    config.write(credential_file)
    token = obtainOAuth2Token(username, password)
    if token is not None:
        print("obtained token: %s\n" % token)
        # to get volumes, uncomment next line
        data = getVolumesFromDataAPI(token, volumeIDs, False)
        # to get pages, uncomment next line
        # data = getPagesFromDataAPI(token, pageIDs, False)
        myzip = ZipFile(StringIO(data))
        myzip.extractall(output)
        myzip.close()
    else:
        print("Failed to obtain oauth token.")
        sys.exit(1)
def _load_handlers_config(self): handlers_file = self.app.config['HANDLERS_FILE'] if not os.path.exists(handlers_file): raise Exception("{} file does not exist".format(handlers_file)) else: config = RawConfigParser() config.read(handlers_file) if not config.has_section('Handlers'): raise Exception("{} does not have a 'Handlers' section") return config
def load_rcfile(self):
    """Locate and apply the rcfile, honouring the legacy location."""
    # Fall back to the old rcfile only when the user has not pointed us
    # elsewhere and the new default location is absent.
    if (self.RCFILE == DEFAULT_SETTINGS['RCFILE']
            and not os.path.exists(self.RCFILE)
            and os.path.exists(OLD_RCFILE)):
        sys.stderr.write("Warning: Old rcfile found at %s, please rename to %s.\n"
                         % (OLD_RCFILE, self.RCFILE))
        self.RCFILE = OLD_RCFILE
    if os.path.exists(self.RCFILE):
        cfg = RawConfigParser()
        # Upper-case option names to match settings attribute names.
        cfg.optionxform = lambda option: option.upper()
        cfg.read(self.RCFILE)
        found = []
        for section in ('global', self.NAME):
            if section is not None and cfg.has_section(section):
                found.extend(cfg.items(section))
        self.load_rcvalues(found)
        self.update_implications()
def setConfigContent(self, content):
    """Replace the config file's contents with `content`.

    `content` maps section names to {option: value} dicts.

    Fixes over the original: self.file_path was opened twice without
    closing either handle (leaking file descriptors), and the file was
    pointlessly re-read immediately after being truncated.
    """
    conf = RawConfigParser()
    for section, options in content.items():
        for option, value in options.items():
            if not conf.has_section(section):
                conf.add_section(section)
            conf.set(section, option, value)
    with open(self.file_path, "w") as conf_file:
        conf.write(conf_file)
class IniConfig(object):
    """This class is used to access/read config file, if it exists.

    :param config_file: the config file name
    :type config_file: str or None
    """

    def __init__(self, filename=None):
        self.config_file = filename
        self.parser = RawConfigParser()
        self.load()

    def load(self, encoding="utf-8"):
        """Load a config file from the list of paths, if it exists."""
        config_file = self.config_file
        # Missing or empty files are skipped silently.
        if os.path.isfile(config_file) and os.path.getsize(config_file) > 0:
            try:
                # Python 3's read() accepts an encoding; Python 2's does not.
                if is_py3:
                    self.parser.read(config_file, encoding=encoding)
                else:
                    self.parser.read(config_file)
                # print(_("DEBUG: Read configuration file %s") % config_file)
            except UnicodeDecodeError as e:
                # A config file in the wrong encoding is fatal.
                print (_("Error: Cannot decode configuration file '{0}': {1}").format(config_file, e))
                sys.exit(1)

    def items(self, section):
        """Return the items list of a section."""
        return self.parser.items(section)

    def has_section(self, section):
        """Return info about the existence of a section."""
        return self.parser.has_section(section)

    def get_option(self, section, option):
        """Get the float value of an option, if it exists.

        Returns None (implicitly) when the option is missing.
        """
        try:
            value = self.parser.getfloat(section, option)
        except NoOptionError:
            return
        else:
            return value

    def get_raw_option(self, section, option):
        """Get the raw value of an option, if it exists.

        Returns None (implicitly) when the option is missing.
        """
        try:
            value = self.parser.get(section, option)
        except NoOptionError:
            return
        else:
            return value
def load_rcfile(self):
    """Parse CLI args, locate the rcfile, and return its parsed values.

    Returns a dict of parsed rc values, or {} when no rcfile exists.
    """
    self.process_args()
    # Fall back to the legacy rcfile only when the default location is
    # unchanged and missing.  NOTE(review): `parser` is presumably the
    # module-level argparse parser -- confirm.
    if self.RCFILE == parser.get_default('RCFILE') and \
       not os.path.exists(self.RCFILE) and os.path.exists(OLD_RCFILE):
        sys.stderr.write("Warning: Using old rcfile found at %s, "
                         "please rename to %s.\n" % (OLD_RCFILE, self.RCFILE))
        self.RCFILE = OLD_RCFILE
    if os.path.exists(self.RCFILE):
        config = RawConfigParser()
        # Upper-case option names to match settings attribute names.
        config.optionxform = lambda x: x.upper()
        config.read(self.RCFILE)
        items = []
        if config.has_section('global'):
            items.extend(config.items('global'))
        if self.NAME is not None and config.has_section(self.NAME):
            items.extend(config.items(self.NAME))
        return self.parse_rcvalues(items)
    return {}
class Config(object):
    """Manage configuration read from a secrets file."""

    # NOTE(review): DEFAULTS is mutated in __init__ when a
    # default_authurl is supplied, so the change is shared by every
    # later instance -- confirm this is intentional.
    DEFAULTS = {
        'username': None,
        'password': None,
        'authurl': None,
        'read-only': '0',
    }

    def __init__(self, secrets_file, default_authurl=None):
        """
        Read configuration from the secrets file.

        A default_authurl can be provided.
        """
        if default_authurl:
            Config.DEFAULTS['authurl'] = default_authurl
        stat = os.stat(secrets_file)
        # 0x004 == 0o004: the world-readable permission bit.
        if stat.st_mode & 0x004 != 0:
            log = logging.getLogger(__package__)
            log.warning("%s is world readable, please consider changing its permissions to 0600" % secrets_file)
        self.secrets_file = secrets_file
        self.conf = RawConfigParser(Config.DEFAULTS)
        self.conf.read(secrets_file)

    def items(self):
        """
        Generator that returns pairs of container name and a dictionary
        with the values associated to that container.

        See Config.DEFAULTS for the valid values.
        """
        for name in self.list_containers():
            yield name, self.get_container(name)

    def get_container(self, name):
        """
        Get a dictionary with the values associated to a container.

        See Config.DEFAULTS for the valid values.  Raises ValueError
        when the container has no section in the secrets file.
        """
        if not self.conf.has_section(name):
            raise ValueError("%s not found in %s" % (name, self.secrets_file))
        return dict(self.conf.items(name))

    def list_containers(self):
        """List all container names."""
        return self.conf.sections()
class FactorioLocale:
    """Accumulates Factorio locale definitions from ini-style files.

    Values containing '__' (template placeholders) are quarantined in a
    secondary parser and only merged in when no clean value exists.
    """

    def __init__(self):
        self.conf = RawConfigParser()   # clean translations
        self.crap = RawConfigParser()   # placeholder-laden fallbacks

    def get_name(self, section, name):
        # Fall back to a '#section#name#' marker for empty values.
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        """Merge one locale file, guessing its encoding with chardet."""
        conf = RawConfigParser()
        with open(csv, 'rb') as f:
            input_bytes = f.read()
        decoded = input_bytes.decode(chardet.detect(input_bytes)['encoding'])
        # Keys appearing before the first [section] header need a home.
        decoded = '[__global__]\n' + decoded
        conf.read_string(decoded)
        for sec in conf.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)
            for k, v in conf.items(sec):
                # '__' marks an unexpanded placeholder -> quarantine.
                is_crap = False
                if '__' in v:
                    is_crap = True
                if not is_crap:
                    if self.conf.has_option(sec, k):
                        if self.conf.get(sec, k).lower() != v.lower():
                            print('Overwriting locale %s (%r -> %r)' % (k, self.conf.get(sec, k), v))
                    self.conf.set(sec, k, v)
                else:
                    if self.crap.has_option(sec, k):
                        print('Overwriting crap locale %s (%r -> %r)' % (k, self.crap.get(sec, k), v))
                    self.crap.set(sec, k, v)

    def merge(self):
        """Backfill keys that only ever appeared with placeholders."""
        for sec in self.crap.sections():
            for k, v in self.crap.items(sec):
                if not self.conf.has_option(sec, k):
                    print('Using crap locale %s (%r)' % (k, v))
                    self.conf.set(sec, k, v)

    def save(self, out):
        """Write the accumulated locale to `out` in ini format."""
        with open(out, 'w') as f:
            self.conf.write(f)
class FactorioLocale:
    """Collects Factorio locale strings.

    Values containing '__' placeholders are quarantined separately and
    only merged in when no clean translation exists for the key.
    """

    def __init__(self):
        self.conf = RawConfigParser()
        self.crap = RawConfigParser()

    def get_name(self, section, name):
        """Return the translation, or a '#section#name#' marker if empty."""
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        """Merge one locale file into the collection."""
        incoming = RawConfigParser()
        # utf-8-sig per https://bugs.python.org/issue7185#msg94346
        with open(csv, encoding='utf-8-sig') as handle:
            incoming.read_file(handle)
        for section in incoming.sections():
            if not self.conf.has_section(section):
                self.conf.add_section(section)
                self.crap.add_section(section)
            for key, value in incoming.items(section):
                # '__' marks an unexpanded placeholder -> quarantine.
                target = self.crap if '__' in value else self.conf
                if target is self.conf:
                    if self.conf.has_option(section, key) and self.conf.get(section, key).lower() != value.lower():
                        print('Overwriting locale %s (%r -> %r)' % (key, self.conf.get(section, key), value))
                else:
                    if self.crap.has_option(section, key):
                        print('Overwriting crap locale %s (%r -> %r)' % (key, self.crap.get(section, key), value))
                target.set(section, key, value)

    def merge(self):
        """Adopt quarantined values whose key never got a clean entry."""
        for section in self.crap.sections():
            for key, value in self.crap.items(section):
                if self.conf.has_option(section, key):
                    continue
                print('Using crap locale %s (%r)' % (key, value))
                self.conf.set(section, key, value)

    def save(self, out):
        """Write the accumulated locale out as an ini file."""
        with open(out, 'w') as handle:
            self.conf.write(handle)
def verify_pki_config_override(cls, filename):
    """Verify pki config override file

    * filename must be an absolute path to an existing file
    * file must be a valid ini file
    * ini file must not override immutable settings

    TODO: The checker does not verify config interpolation values, yet.
    The validator does not have access to all settings.

    :param filename: path to pki.ini
    """
    if not os.path.isfile(filename):
        raise ValueError(
            "Config file '{}' does not exist.".format(filename)
        )
    if not os.path.isabs(filename):
        raise ValueError(
            "Config file '{}' is not an absolute path.".format(filename)
        )
    try:
        cfg = RawConfigParser()
        with open(filename) as f:
            cfg.read_file(f)
    except Exception as e:
        # Any parse failure is reported uniformly as an invalid config.
        raise ValueError(
            "Invalid config '{}': {}".format(filename, e)
        )
    # Collect any overridden keys that are considered immutable.
    immutable_keys = cls.get_immutable_keys()
    invalid_keys = set()
    # Check the parser's default section plus each subsystem section.
    sections = [cfg.default_section]
    sections.extend(cls.subsystems)
    for section in sections:
        if not cfg.has_section(section):
            continue
        # raw=True: interpolation values are not validated (see TODO).
        for k, _v in cfg.items(section, raw=True):
            if k in immutable_keys:
                invalid_keys.add(k)
    if invalid_keys:
        raise ValueError(
            "'{}' overrides immutable options: {}".format(
                filename, ', '.join(sorted(invalid_keys))
            )
        )
class Config(object):
    """Thin convenience wrapper around ConfigParser with a defaulted get().

    Fixes over the original: the duplicate ``has_option`` method (which
    was always shadowed by the delegate bound in ``__init__``, making it
    dead code) is removed, and file writing uses a context manager so
    the handle is closed even if write() raises.
    """

    def __init__(self, config_file):
        self.config_parser = ConfigParser()
        # Delegate the common ConfigParser operations directly.
        self.remove_option = self.config_parser.remove_option
        self.has_option = self.config_parser.has_option
        self.add_section = self.config_parser.add_section
        self.getboolean = self.config_parser.getboolean
        self.getint = self.config_parser.getint
        self.getfloat = self.config_parser.getfloat
        self.options = self.config_parser.options
        self.items = self.config_parser.items
        self.config_file = config_file

    def load(self):
        """Read the configuration from self.config_file."""
        self.config_parser.read(self.config_file)

    def get(self, section, option, default=None, debug=False):
        """Return the option's value, or `default` on any error."""
        try:
            return self.config_parser.get(section, option)
        except Exception as e:
            if debug:
                print("function get got error: %s" % (e))
                traceback.print_exc(file=sys.stdout)
            return default

    def set(self, section, option, value, debug=False):
        """Set an option, creating its section on demand."""
        if not self.config_parser.has_section(section):
            if debug:
                print("Section \"%s\" not exist. create..." % (section))
            self.add_section(section)
        self.config_parser.set(section, option, value)

    def write(self, given_filepath=None):
        """Write the configuration to `given_filepath` or the default file."""
        target = given_filepath if given_filepath else self.config_file
        with open(target, "w") as f:
            self.config_parser.write(f)
def load_flake8_config(filename, global_config=False, project_config=False):
    """
    Returns flake8 settings from config file.

    More info: http://flake8.readthedocs.org/en/latest/config.html
    """
    parser = RawConfigParser()
    # check global config
    if global_config and os.path.isfile(DEFAULT_CONFIG_FILE):
        parser.read(DEFAULT_CONFIG_FILE)
    # search config in filename dir and all parent dirs
    if project_config:
        parent = tail = os.path.abspath(filename)
        while tail:
            candidates = [os.path.join(parent, fn) for fn in CONFIG_FILES]
            if parser.read(candidates):
                break
            parent, tail = os.path.split(parent)
    result = {}
    if not parser.has_section('flake8'):
        return result
    option_specs = (
        ('ignore', 'ignore', 'list'),
        ('select', 'select', 'list'),
        ('exclude', 'ignore_files', 'list'),
        ('max_line_length', 'pep8_max_line_length', 'int'),
    )
    for config, plugin, option_type in option_specs:
        # flake8 accepts both under_scored and dash-ed option names.
        if not parser.has_option('flake8', config):
            config = config.replace('_', '-')
        if not parser.has_option('flake8', config):
            continue
        raw = parser.get('flake8', config).strip()
        if not raw:
            continue
        if option_type == 'list':
            result[plugin] = raw.split(',')
        elif option_type == 'int':
            result[plugin] = parser.getint('flake8', config)
    return result
def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
    command/section name.  A ``None`` value means to delete the entire
    section, while a dictionary lists settings to be changed or deleted in
    that section.  A setting of ``None`` means to delete that setting.
    """
    from configparser import RawConfigParser
    log.debug("Reading configuration from %s", filename)
    opts = RawConfigParser()
    opts.read([filename])
    for section, options in list(settings.items()):
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
        else:
            if not opts.has_section(section):
                log.debug("Adding new section [%s] to %s", section, filename)
                opts.add_section(section)
            for option, value in list(options.items()):
                if value is None:
                    log.debug("Deleting %s.%s from %s",
                              section, option, filename)
                    opts.remove_option(section, option)
                    # Drop the section entirely once its last option goes.
                    if not opts.options(section):
                        log.info("Deleting empty [%s] section from %s",
                                 section, filename)
                        opts.remove_section(section)
                else:
                    log.debug("Setting %s.%s to %r in %s",
                              section, option, value, filename)
                    opts.set(section, option, value)
    log.info("Writing %s", filename)
    if not dry_run:
        # Bug fix: use a context manager so the handle is closed even
        # when write() raises (the original open/write/close sequence
        # leaked the handle on error).
        with open(filename, 'w') as f:
            opts.write(f)
def _merge_from_file(self, config_file):
    """
    Merge variables from ``config_file`` into the environment.

    Any variables in ``config_file`` that have already been set will be
    ignored (meaning this method will *not* try to override them, which
    would raise an exception).

    If ``config_file`` does not exist or is not a regular file, or if there
    is an error parsing ``config_file``, ``None`` is returned.

    Otherwise this method returns a ``(num_set, num_total)`` tuple containing
    first the number of variables that were actually set, and second the total
    number of variables found in ``config_file``.

    Also see `Env._merge()`.

    :param config_file: Path of the configuration file to load.
    """
    if not path.isfile(config_file):
        return None
    cfg = RawConfigParser()
    try:
        cfg.read(config_file)
    except ParsingError:
        return None
    if not cfg.has_section(CONFIG_SECTION):
        cfg.add_section(CONFIG_SECTION)
    found = cfg.items(CONFIG_SECTION)
    if not found:
        return 0, 0
    applied = 0
    for key, value in found:
        if key in self:
            continue  # never override an existing variable
        self[key] = value
        applied += 1
    if 'config_loaded' not in self:
        # we loaded at least 1 file
        self['config_loaded'] = True
    return applied, len(found)
class CredentialsFile(object):
    # Reads and updates one profile section of an AWS credentials file.

    def __init__(self, section, filename=path.expanduser('~/.aws/credentials')):
        # NOTE(review): the default filename is evaluated once at class
        # definition time, not per call.
        self._filename = filename
        self._config = RawConfigParser()
        self.section = section
        with open(self._filename, 'r') as f:
            # NOTE(review): readfp is deprecated (removed in Python
            # 3.12); read_file is the modern spelling.
            self._config.readfp(f)
        if not self._config.has_section(section):
            raise SystemExit('could not find section [%s] in %r' % (section, filename))

    @property
    def keyId(self):
        # AWS access key id for the selected profile.
        return self._config.get(self.section, 'aws_access_key_id')

    @property
    def secretKey(self):
        # AWS secret access key for the selected profile.
        return self._config.get(self.section, 'aws_secret_access_key')

    def updateCredentials(self, keyId, secretKey):
        """Write new credentials to the config file.

        I'll also set the umask to 0066, but since I'm the only thing
        writing files you shouldn't mind.
        """
        self._config.set(self.section, 'aws_access_key_id', keyId)
        self._config.set(self.section, 'aws_secret_access_key', secretKey)
        os.umask(0o0066)
        # Keep the previous file as a "~" backup before rewriting.
        os.rename(self._filename, self._filename + '~')
        with open(self._filename, 'w') as f:
            self._config.write(f)
def get_merged_config(**options):
    """Get the final merged configuration for supvervisord, as a string.

    This is the top-level function exported by this module.  It combines
    the config file from the main project with default settings and those
    specified in the command-line, processes various special section
    names, and returns the resulting configuration as a string.
    """
    # Find and load the containing project module.
    # This can be specified explicity using the --project-dir option.
    # Otherwise, we attempt to guess by looking for the manage.py file.
    project_dir = options.get("project_dir")
    if project_dir is None:
        project_dir = guess_project_dir()
    # Find the config file to load.
    # Default to <project-dir>/supervisord.conf.
    config_file = options.get("config_file")
    if config_file is None:
        config_file = os.path.join(project_dir, CONFIG_FILE)
    # Build the default template context variables.
    # This is mostly useful information about the project and environment.
    ctx = {
        "PROJECT_DIR": project_dir,
        "PYTHON": os.path.realpath(os.path.abspath(sys.executable)),
        "SUPERVISOR_OPTIONS": rerender_options(options),
        "settings": settings,
        "environ": os.environ,
    }
    # Initialise the ConfigParser.
    # Fortunately for us, ConfigParser has merge-multiple-config-files
    # functionality built into it.  You just read each file in turn, and
    # values from later files overwrite values from former.
    cfg = RawConfigParser()
    # Start from the default configuration options.
    # NOTE(review): readfp is the deprecated spelling of read_file.
    data = render_config(DEFAULT_CONFIG, ctx).decode("ascii")
    cfg.readfp(StringIO(data))
    # Add in the project-specific config file.
    with open(config_file, "r") as f:
        data = render_config(f.read(), ctx)
    cfg.readfp(StringIO(data.decode("utf-8")))
    # Add in the options specified on the command-line.
    cfg.readfp(StringIO(get_config_from_options(**options)))
    # Add options from [program:__defaults__] to each program section
    # if it happens to be missing that option.
    PROG_DEFAULTS = "program:__defaults__"
    if cfg.has_section(PROG_DEFAULTS):
        for option in cfg.options(PROG_DEFAULTS):
            default = cfg.get(PROG_DEFAULTS, option)
            for section in cfg.sections():
                if section.startswith("program:"):
                    if not cfg.has_option(section, option):
                        cfg.set(section, option, default)
        cfg.remove_section(PROG_DEFAULTS)
    # Add options from [program:__overrides__] to each program section
    # regardless of whether they already have that option.
    PROG_OVERRIDES = "program:__overrides__"
    if cfg.has_section(PROG_OVERRIDES):
        for option in cfg.options(PROG_OVERRIDES):
            override = cfg.get(PROG_OVERRIDES, option)
            for section in cfg.sections():
                if section.startswith("program:"):
                    cfg.set(section, option, override)
        cfg.remove_section(PROG_OVERRIDES)
    # Make sure we've got a port configured for supervisorctl to
    # talk to supervisord.  It's passworded based on secret key.
    # If they have configured a unix socket then use that, otherwise
    # use an inet server on localhost at fixed-but-randomish port.
    # NOTE(review): md5 here derives stable identifiers, not security
    # material.
    username = hashlib.md5(settings.SECRET_KEY.encode("utf-8")).hexdigest()[:7]
    password = hashlib.md5(username.encode("utf-8")).hexdigest()
    if cfg.has_section("unix_http_server"):
        set_if_missing(cfg, "unix_http_server", "username", username)
        set_if_missing(cfg, "unix_http_server", "password", password)
        serverurl = "unix://" + cfg.get("unix_http_server", "file")
    else:
        # This picks a "random" port in the 9000 range to listen on.
        # It's derived from the secret key, so it's stable for a given
        # project but multiple projects are unlikely to collide.
        port = int(hashlib.md5(password.encode("utf-8")).hexdigest()[:3], 16) % 1000
        addr = "127.0.0.1:9%03d" % (port, )
        set_if_missing(cfg, "inet_http_server", "port", addr)
        set_if_missing(cfg, "inet_http_server", "username", username)
        set_if_missing(cfg, "inet_http_server", "password", password)
        serverurl = "http://" + cfg.get("inet_http_server", "port")
    set_if_missing(cfg, "supervisorctl", "serverurl", serverurl)
    set_if_missing(cfg, "supervisorctl", "username", username)
    set_if_missing(cfg, "supervisorctl", "password", password)
    set_if_missing(cfg, "rpcinterface:supervisor",
                   "supervisor.rpcinterface_factory",
                   "supervisor.rpcinterface:make_main_rpcinterface")
    # Remove any [program:] sections with exclude=true
    for section in cfg.sections():
        try:
            if cfg.getboolean(section, "exclude"):
                cfg.remove_section(section)
        except NoOptionError:
            pass
    # Sanity-check to give better error messages.
    for section in cfg.sections():
        if section.startswith("program:"):
            if not cfg.has_option(section, "command"):
                msg = "Process name '%s' has no command configured"
                raise ValueError(msg % (section.split(":", 1)[-1]))
    # Write it out to a StringIO and return the data
    s = StringIO()
    cfg.write(s)
    return s.getvalue()
class Config(object):
    """
    Manages Bonsai configuration environments.

    Configuration information is pulled from different locations. This class
    helps keep it organized. Configuration information comes from environment
    variables, the user `~/.bonsai` file, a local `./.bonsai` file, the
    `./.brains` file, command line arguments, and finally, parameters
    overridden in code.

    An optional `profile` key can be used to switch between different
    profiles stored in the `~/.bonsai` configuration file. The user's active
    profile is selected if none is specified.

    Attributes:
        accesskey:     Users access key from the web.
                       (Example: 00000000-1111-2222-3333-000000000001)
        workspace_id:  Users login name.
        url:           URL of the server to connect to.
                       (Example: "https://api.bons.ai")
        brain:         Name of the BRAIN to use.
        predict:       True is predicting against a BRAIN, False for training.
        brain_version: Version number of the brain to use for prediction.
        proxy:         Server name and port number of proxy to connect through.
                       (Example: "localhost:9000")

    Example Usage:
        import sys, bonsai_ai
        config = bonsai_ai.Config(sys.argv)
        print(config)
        if config.predict:
            ...
    """

    def __init__(self,
                 argv: Optional[List[str]] = None,
                 profile: Any = None,
                 use_aad: bool = False,
                 require_workspace: bool = True):
        """
        Construct Config object with program arguments.
        Pass in sys.argv for command-line arguments and an optional
        profile name to select a specific profile.

        Arguments:
            argv:    A list of argument strings (defaults to sys.argv).
            profile: The name of a profile to select. (optional)
            use_aad: Use AAD authentication.
            require_workspace: Instance will be used on control plane.
        """
        # Resolve the default at call time rather than at definition time so
        # changes to sys.argv after import are honored.
        if argv is None:
            argv = sys.argv

        self.accesskey = None
        self.workspace_id = None
        self.tenant_id = None
        self.url = None
        self.gateway_url = None
        self.use_color = True
        self.use_aad = use_aad
        self.sdk3 = False
        self.brain = None
        self.predict = False
        self.brain_version = 0
        self._proxy = None
        self._retry_timeout_seconds = 300
        self._network_timeout_seconds = 60
        self.verbose = False
        self.record_file = None
        self.record_enabled = False
        # These were previously created only on certain code paths; give them
        # defaults so attribute access is always safe.
        self.headless = False
        self.subscription = None
        self.resource_group = None
        self.file_paths = set()

        self._config_parser = RawConfigParser(allow_no_value=True)
        self._read_config()
        self.profile = profile

        self._parse_env()
        self._parse_config(_DEFAULT)
        self._parse_config(profile)
        self._parse_brains()
        self._parse_args(argv)

        # parse args works differently in 2.7
        if sys.version_info >= (3, 0):
            self._parse_legacy(argv)

        self.aad_client = AADClient(self.tenant_id)
        self.accesskey = self.aad_client.get_access_token()

    def __repr__(self):
        """ Prints out a JSON formatted string of the Config state. """
        return '{{'\
            '\"profile\": \"{self.profile!r}\", ' \
            '\"accesskey\": \"{self.accesskey!r}\", ' \
            '\"workspace_id\": \"{self.workspace_id!r}\", ' \
            '\"brain\": \"{self.brain!r}\", ' \
            '\"url\": \"{self.url!r}\", ' \
            '\"use_color\": \"{self.use_color!r}\", ' \
            '\"predict\": \"{self.predict!r}\", ' \
            '\"brain_version\": \"{self.brain_version!r}\", ' \
            '\"proxy\": \"{self.proxy!r}\", ' \
            '\"retry_timeout\": \"{self.retry_timeout!r}\", ' \
            '\"network_timeout\": \"{self.network_timeout!r}\" ' \
            '}}'.format(self=self)

    @property
    def proxy(self):
        # shell-local environment vars get top precedence, falling back to
        # OS-specific registry/configuration values
        if self._proxy is not None:
            return self._proxy

        proxy_dict = getproxies()
        proxy = proxy_dict.get(_ALL_PROXY, None)
        http_proxy = proxy_dict.get(_HTTP_PROXY, None)
        if http_proxy is not None:
            proxy = http_proxy

        if self.url is not None:
            uri = urlparse(self.url)
            if uri.scheme == 'https':
                https_proxy = proxy_dict.get(_HTTPS_PROXY, None)
                if https_proxy is not None:
                    proxy = https_proxy
        return proxy

    @proxy.setter
    def proxy(self, proxy: str):
        # Accessing `.port` validates the proxy string: urlparse raises
        # ValueError for a malformed port.
        uri = urlparse(proxy)
        uri.port
        self._proxy = proxy

    @property
    def record_format(self):
        """ The log record format, as inferred from the extension
            of the log filename """
        if self.record_file:
            _, fmt = splitext(self.record_file)
            return fmt
        else:
            return None

    @property
    def retry_timeout(self):
        return self._retry_timeout_seconds

    @retry_timeout.setter
    def retry_timeout(self, value: int):
        # -1 means "retry forever", 0 means "no retry".
        if value < -1:
            raise ValueError(
                'Retry timeout must be a positive integer, 0, or -1.')
        self._retry_timeout_seconds = value

    @property
    def network_timeout(self):
        return self._network_timeout_seconds

    @network_timeout.setter
    def network_timeout(self, value: int):
        if value < 1:
            raise ValueError('Network timeout must be a positive integer.')
        self._network_timeout_seconds = value

    def refresh_access_token(self):
        """ Re-fetch the AAD bearer token; raise if it cannot be obtained. """
        if self.aad_client:
            self.accesskey = self.aad_client.get_access_token()
            if not self.accesskey:
                raise AuthenticationError(
                    'Could not refresh AAD bearer token.')

    def _parse_env(self):
        ''' parse out environment variables used in hosted containers '''
        self.brain = environ.get(_BONSAI_TRAIN_BRAIN, None)
        headless = environ.get(_BONSAI_HEADLESS, None)
        if headless == 'True':
            self.headless = True

    def _parse_config(self, profile: Optional[str]):
        ''' parse both the '~/.bonsai' and './.bonsai' config files. '''

        # Copy a single option from the active section onto this object,
        # coercing booleans for the color flag.
        def assign_key(key: str):
            if self._config_parser.has_option(section, key):
                if key.lower() == _USE_COLOR.lower():
                    self.__dict__[key] = self._config_parser.getboolean(
                        section, key)
                else:
                    self.__dict__[key] = self._config_parser.get(section, key)

        # get the profile
        section = _DEFAULT
        if profile is None:
            if self._config_parser.has_option(_DEFAULT, _PROFILE):
                section = self._config_parser.get(_DEFAULT, _PROFILE)
                self.profile = section
        else:
            section = profile

        assign_key(_ACCESSKEY)
        assign_key(_WORKSPACEID)
        assign_key(_TENANTID)
        assign_key(_SUBSCRIPTION)
        assign_key(_RESOURCEGROUP)
        assign_key(_URL)
        assign_key(_GATEWAYURL)
        assign_key(_PROXY)
        assign_key(_USE_COLOR)

        # if url is none set it to default bonsai api url
        if self.url is None:
            self.url = _DEFAULT_URL
        elif not urlparse(self.url).scheme:
            # if no url scheme is supplied, assume https
            self.url = 'https://{}'.format(self.url)

    def _parse_brains(self):
        ''' parse the './.brains' config file
            Example:
                {"brains": [{"default": true, "name": "test"}]}
        '''
        data = {}
        try:
            with open(_DOT_BRAINS) as file:
                data = json.load(file)

                # parse file now
                for brain in data['brains']:
                    if brain['default'] is True:
                        self.brain = brain['name']
                        return

        # except FileNotFoundError: python3
        except IOError:
            return

    def _parse_legacy(self, argv: List[str]):
        ''' print support for legacy CLI arguments '''
        if sys.version_info >= (3, 0):
            optional = ArgumentParser(
                description="",
                allow_abbrev=False,
                add_help=False)
        else:
            optional = ArgumentParser(
                description="",
                add_help=False)

        optional.add_argument('--legacy', action='store_true',
                              help='Legacy command line options')
        optional.add_argument('--train-brain', help=_TRAIN_BRAIN_HELP)
        optional.add_argument('--predict-brain', help=_PREDICT_BRAIN_HELP)
        optional.add_argument('--predict-version', help=_PREDICT_VERSION_HELP)
        optional.add_argument('--recording-file', help=_RECORDING_FILE_HELP)
        args, remainder = optional.parse_known_args(argv)

        if args.train_brain is not None:
            self.brain = args.train_brain
            self.predict = False

        if args.predict_version is not None:
            self.predict = True
            if args.predict_version == "latest":
                self.brain_version = 0
            else:
                self.brain_version = int(args.predict_version)

        if remainder is not None:
            pass

    def _parse_args(self, argv: List[str]):
        ''' parser command line arguments '''
        if sys.version_info >= (3, 0):
            parser = ArgumentParser(allow_abbrev=False)
        else:
            parser = ArgumentParser()

        parser.add_argument('--accesskey', '--access-key',
                            help=_ACCESS_KEY_HELP)
        parser.add_argument('--workspace_id', help=_WORKSPACE_ID_HELP)
        parser.add_argument('--tenant_id', help=_TENANT_ID_HELP)
        parser.add_argument('--subscription', help=_SUBSCRIPTION_HELP)
        parser.add_argument('--resource_group', help=_RESOURCE_GROUP_HELP)
        parser.add_argument('--url', help=_URL_HELP)
        parser.add_argument('--gateway_url', help=_GATEWAY_URL_HELP)
        parser.add_argument('--proxy', help=_PROXY_HELP)
        parser.add_argument('--brain', help=_BRAIN_HELP)
        parser.add_argument('--predict', help=_PREDICT_HELP,
                            nargs='?', const='latest', default=None)
        parser.add_argument('--aad', action='store_true', help=_AAD_HELP)
        parser.add_argument('--verbose', action='store_true',
                            help=_VERBOSE_HELP)
        parser.add_argument('--performance', action='store_true',
                            help=_PERFORMANCE_HELP)
        parser.add_argument('--log', nargs='+', help=_LOG_HELP)
        parser.add_argument('--record', nargs=1, default=None,
                            help=_RECORD_HELP)
        parser.add_argument('--retry-timeout', type=int,
                            help=_RETRY_TIMEOUT_HELP)
        parser.add_argument('--network-timeout', type=int,
                            help=_NETWORK_TIMEOUT_HELP)
        parser.add_argument('--sdk3', action='store_true', help=_SDK3_HELP)

        args, remainder = parser.parse_known_args(argv[1:])

        if args.aad:
            self.use_aad = args.aad

        if args.accesskey is not None:
            self.accesskey = args.accesskey

        if args.workspace_id is not None:
            self.workspace_id = args.workspace_id

        if args.tenant_id is not None:
            self.tenant_id = args.tenant_id

        if args.subscription is not None:
            self.subscription = args.subscription

        if args.resource_group is not None:
            self.resource_group = args.resource_group

        if args.url is not None:
            self.url = args.url

        if args.gateway_url is not None:
            # BUGFIX: previously assigned args.url here, silently ignoring
            # the --gateway_url value.
            self.gateway_url = args.gateway_url

        if args.proxy is not None:
            self.proxy = args.proxy

        if args.brain is not None:
            self.brain = args.brain

        if args.verbose:
            self.verbose = args.verbose
            log.set_enable_all(args.verbose)

        if args.performance:
            # logging::log().set_enabled(true);
            # logging::log().set_enable_all_perf(true);
            pass

        if args.log is not None:
            for domain in args.log:
                log.set_enabled(domain)

        if args.record:
            self.record_file = args.record[0]
            self.record_enabled = True

        if args.retry_timeout is not None:
            self.retry_timeout = args.retry_timeout

        if args.network_timeout is not None:
            self.network_timeout = args.network_timeout

        if args.sdk3:
            self.sdk3 = True
            if sys.version_info < (3, 6):
                raise RuntimeError('Use of the --sdk3 flag requires '
                                   'Python 3.6 or greater')

        brain_version = None
        if args.predict is not None:
            if args.predict == "latest":
                brain_version = 0
            else:
                brain_version = args.predict
            self.predict = True

        # update brain_version after all args have been processed
        if brain_version is not None:
            brain_version = int(brain_version)
            if brain_version < 0:
                raise ValueError('BRAIN version number must be'
                                 'positive integer or "latest".')
            self.brain_version = brain_version

        if remainder is not None:
            pass

    def _config_files(self):
        """ Candidate config paths: home directory first, then cwd. """
        return [join(expanduser('~'), _DOT_BONSAI), join('.', _DOT_BONSAI)]

    def _read_config(self):
        # verify that at least one of the config files exists
        # as RawConfigParser ignores missing files
        found = False
        config_files = self._config_files()
        for path in config_files:
            if os.access(path, os.R_OK):
                found = True
                break
        if not found:
            # Write empty .bonsai to disk if no file is found
            self._write_dot_bonsai()
        self._config_parser.read(config_files)
        for path in config_files:
            if os.path.exists(path):
                self.file_paths.add(path)

    def _set_profile(self, section: Any):
        # Create section if it does not exist
        if not self._config_parser.has_section(
                section) and section != _DEFAULT:
            self._config_parser.add_section(section)
        # Set profile in class and config
        self.profile = section
        if section == _DEFAULT:
            self._config_parser.set(_DEFAULT, _PROFILE, 'DEFAULT')
        else:
            self._config_parser.set(_DEFAULT, _PROFILE, str(section))

    def _write_dot_bonsai(self):
        """ Writes to .bonsai in users home directory """
        config_path = join(expanduser('~'), _DOT_BONSAI)
        try:
            with open(config_path, 'w') as f:
                self._config_parser.write(f)
        except (FileNotFoundError, PermissionError):
            log.info(
                'WARNING: Unable to write .bonsai to {}'.format(config_path))

    def websocket_url(self):
        """ Converts api url to websocket url """
        api_url = self.url or ''
        parsed_api_url = urlparse(api_url)

        if parsed_api_url.scheme == 'http':
            parsed_ws_url = parsed_api_url._replace(scheme='ws')
        elif parsed_api_url.scheme == 'https':
            parsed_ws_url = parsed_api_url._replace(scheme='wss')
        else:
            return None
        ws_url = urlunparse(parsed_ws_url)
        return ws_url

    def has_section(self, section: str):
        """Checks the configuration to see if section exists."""
        if section == _DEFAULT:
            return True
        return self._config_parser.has_section(section)

    def section_list(self):
        """ Returns a list of sections in config """
        return self._config_parser.sections()

    def section_items(self, section: str):
        """ Returns a dictionary of items in a section """
        return self._config_parser.items(section)

    def defaults(self):
        """ Returns an ordered dict of items in the DEFAULT section """
        return self._config_parser.defaults()

    def update(self, **kwargs: Any):
        """
        Updates the configuration with the Key/value pairs in kwargs and
        writes to the .bonsai file in the users home directory.
        """
        if not kwargs:
            return
        for key, value in kwargs.items():
            if key.lower() == _PROFILE.lower():
                self._set_profile(value)
            else:
                try:
                    self._config_parser.set(self.profile, key, str(value))
                except NoSectionError:
                    # Create and set default profile if it
                    # does not exist in .bonsai
                    self._set_profile(self.profile)
                    self._config_parser.set(self.profile, key, str(value))
        self._write_dot_bonsai()
        self._parse_config(self.profile)
class ConfigEditor:
    """
    In-memory editor for an INI-style config file of per-project sections.

    A file must be opened with :meth:`open` before any other operation;
    every accessor raises ``InvalidOperation`` otherwise. Changes are only
    persisted when :meth:`save` is called.
    """

    def __init__(self):
        self.config_parser = RawConfigParser()
        # Path of the currently opened config file, or None when closed.
        self.config_file = None

    def open(self, config_file):
        """
        Open and read a config file

        :param config_file: config file path
        """
        logger.debug('Opening {}'.format(config_file))
        self.config_file = config_file
        if os.access(config_file, os.R_OK):
            logger.debug('Parsing config file')
            self.config_parser.read(config_file)

    def read(self, section, option, fallback=None):
        """
        Returns a config option value from config file

        :param section: section where the option is stored
        :param option: option name
        :param fallback: (optional) fallback value
        :return: a config option value
        :rtype: string
        """
        if self.config_file is None:
            raise InvalidOperation('read')
        if fallback is None:
            return self.config_parser.get(section, option)
        else:
            return self.config_parser.get(section, option, fallback=fallback)

    def readboolean(self, section, option, fallback=False):
        """
        Returns a boolean config option value from config file

        :param section: section where the option is stored
        :param option: option name
        :param fallback: (optional) fallback value
        :return: a config option value
        :rtype: boolean
        """
        if self.config_file is None:
            raise InvalidOperation('readboolean')
        return self.config_parser.getboolean(section, option,
                                             fallback=fallback)

    def write(self, section, option, value):
        """
        Write a config option value in config object

        :param section: section where the option is stored
        :param option: option name
        :param value: option value
        """
        if self.config_file is None:
            raise InvalidOperation('write')
        if section != 'DEFAULT' and not self.config_parser.has_section(
                section):
            logger.debug('Adding new section {}'.format(section))
            self.config_parser.add_section(section)
        logger.debug('Adding {}.{} with value {}'.format(
            section, option, value))
        self.config_parser.set(section, option, value)

    def remove(self, section, option):
        """
        Remove a config option in config object

        :param section: section where the option is stored
        :param option: option name
        :return: True if option is removed, False if not exist
        :rtype: boolean
        """
        if self.config_file is None:
            raise InvalidOperation('remove')
        logger.debug('Removing {}.{}'.format(section, option))
        option_removed = self.config_parser.remove_option(section, option)
        if section != 'DEFAULT' and option_removed:
            # A section whose items equal DEFAULT's holds nothing of its own.
            if self.config_parser.items(section) == self.config_parser.items(
                    'DEFAULT'):
                logger.debug('Removing empty section {}'.format(section))
                self.config_parser.remove_section(section)
        return option_removed

    def remove_project(self, project):
        """
        Remove a project (config section in config object)

        :param project: section name
        :return: True if section is removed, False if not exist
        :rtype: boolean
        """
        if self.config_file is None:
            raise InvalidOperation('remove')
        logger.debug('Removing {}'.format(project))
        return self.config_parser.remove_section(project)

    def list(self):
        """
        List config sections

        :return: list of projects (sections in config)
        :rtype: list
        """
        if self.config_file is None:
            raise InvalidOperation('list')
        return self.config_parser.sections()

    def list_enabled_projects(self):
        """
        Get the list of enabled projects

        :return: list of enabled projects
        :rtype: list
        """
        if self.config_file is None:
            raise InvalidOperation('list_enabled_projects')
        try:
            return self.config_parser.get('DEFAULT', 'sync_projects').split()
        except NoOptionError:
            return []

    def enable_project(self, project):
        """
        Enable a project adding it to sync_projects

        :param project: project name
        """
        if self.config_file is None:
            raise InvalidOperation('enable_project')
        logger.debug('Enabling project {}'.format(project))
        enabled_projects = self.list_enabled_projects()
        enabled_projects.append(project)
        enabled_projects.sort()
        self.config_parser.set('DEFAULT', 'sync_projects',
                               ' '.join(enabled_projects))

    def disable_project(self, project):
        """
        Disable a project removing it from sync_projects

        :param project: project name
        :return: True if project is disabled, False if not
        :rtype: boolean
        """
        if self.config_file is None:
            raise InvalidOperation('disable_project')
        logger.debug('Disabling project {}'.format(project))
        enabled_projects = self.list_enabled_projects()
        try:
            enabled_projects.remove(project)
            self.config_parser.set('DEFAULT', 'sync_projects',
                                   ' '.join(enabled_projects))
            return True
        except ValueError:
            logger.debug(
                'Nothing to do, {} is not in enabled projects'.format(project))
            return False

    def has_project(self, project):
        """
        Check if a project (a section in config) is present

        :param project: section name
        :return: True if section exists, False if not
        :rtype: boolean
        """
        if self.config_file is None:
            raise InvalidOperation('has_project')
        return self.config_parser.has_section(project)

    def has_project_enabled(self, project):
        """
        Check if a project is enabled

        :param project: project name
        :return: True if project is enabled, False if not
        :rtype: boolean
        """
        if self.config_file is None:
            raise InvalidOperation('has_project_enabled')
        return project in self.list_enabled_projects()

    def save(self):
        """
        Save the config object in config file
        """
        if self.config_file is None:
            raise InvalidOperation('save')
        logger.debug('Saving config in config file')
        with open(self.config_file, 'w') as configfile:
            self.config_parser.write(configfile)
        self.config_file = None

    def clean(self):
        """
        Cleans the config editor
        """
        logger.debug('Cleaning config editor')
        self.config_parser = RawConfigParser()
        self.config_file = None
class OktaAuthConfig():
    """ Config helper class for the ~/.okta-aws profile file. """

    def __init__(self, logger):
        self.logger = logger
        self.config_path = os.path.expanduser('~') + '/.okta-aws'
        self._value = RawConfigParser()
        self._value.read(self.config_path)

    @staticmethod
    def configure(logger):
        """ Interactively create or extend the ~/.okta-aws config file. """
        value = RawConfigParser()
        config_path = os.path.expanduser('~') + '/.okta-aws'
        append = False
        if os.path.exists(config_path):
            value.read(config_path)
            print(f"You have preconfigured Okta profiles: {value.sections()}")
            print(f"This command will append new profile to the existing {config_path} config file")
            append = True
        else:
            print(f"This command will create a new {config_path} config file")
        confirm = input('Would you like to proceed? [y/n]: ')
        if confirm == 'y':
            logger.info(f"Creating new {config_path} file")
            okta_profile = input('Enter Okta profile name: ')
            if not okta_profile:
                okta_profile = 'default'
            profile = input('Enter AWS profile name: ')
            base_url = input('Enter Okta base url [your main organisation Okta url]: ')
            # NOTE(review): the following two prompts were redacted in the
            # original source ('******'); reconstructed from context — verify.
            username = input('Enter Okta username: ')
            app_link = input('Enter AWS app-link [optional]: ')
            duration = input('Duration in seconds to request a session token for [Default=3600]: ')
            if not duration:
                duration = 3600
            value.add_section(okta_profile)
            value.set(okta_profile, 'base-url', base_url)
            value.set(okta_profile, 'profile', profile)
            value.set(okta_profile, 'username', username)
            if app_link:
                value.set(okta_profile, 'app-link', app_link)
            value.set(okta_profile, 'duration', duration)
            if append:
                with open(config_path, 'a') as configfile:
                    value.write(configfile)
            else:
                with open(config_path, 'w+') as configfile:
                    value.write(configfile)
            print(f"File {config_path} successfully created. Now you can authenticate to Okta")
            print(f"Execute 'okta-awscli -o {okta_profile} -p {profile} sts get-caller-identity' to authenticate and retrieve credentials")
            sys.exit(0)
        else:
            sys.exit(0)

    def base_url_for(self, okta_profile):
        """ Gets base URL from config """
        if self._value.has_option(okta_profile, 'base-url'):
            base_url = self._value.get(okta_profile, 'base-url')
            self.logger.info("Authenticating to: %s" % base_url)
        elif self._value.has_option('default', 'base-url'):
            base_url = self._value.get('default', 'base-url')
            self.logger.info(
                "Using base-url from default profile %s" % base_url
            )
        else:
            self.logger.error(
                "No profile found. Please define a default profile, or specify a named profile using `--okta-profile`"
            )
            sys.exit(1)
        return base_url

    def app_link_for(self, okta_profile):
        """ Gets app_link from config """
        app_link = None
        if self._value.has_option(okta_profile, 'app-link'):
            app_link = self._value.get(okta_profile, 'app-link')
        elif self._value.has_option('default', 'app-link'):
            app_link = self._value.get('default', 'app-link')

        if app_link:
            try:
                if not validators.url(app_link):
                    self.logger.error("The app-link provided: %s is an invalid url" % app_link)
                    sys.exit(-1)
            except TypeError as ex:
                self.logger.error("Malformed string in app link URL. Ensure there are no invalid characters.")
            self.logger.info("App Link set as: %s" % app_link)
            return app_link
        else:
            self.logger.error("The app-link is missing. Will try to retrieve it from Okta")
            return None

    def username_for(self, okta_profile):
        """ Gets username from config """
        if self._value.has_option(okta_profile, 'username'):
            username = self._value.get(okta_profile, 'username')
            self.logger.info("Authenticating as: %s" % username)
        else:
            # NOTE(review): prompt reconstructed from redacted source.
            username = input('Enter username: ')
        return username

    def password_for(self, okta_profile):
        """ Gets password from config, prompting if absent.

        NOTE(review): this method was partially redacted in the original
        source; reconstructed from the surviving fragments — verify.
        """
        if self._value.has_option(okta_profile, 'password'):
            password = self._value.get(okta_profile, 'password')
        else:
            password = getpass('Enter password: ')
        return password

    def factor_for(self, okta_profile):
        """ Gets the MFA factor from config, or None if unset. """
        if self._value.has_option(okta_profile, 'factor'):
            factor = self._value.get(okta_profile, 'factor')
            self.logger.debug("Setting MFA factor to %s" % factor)
            return factor
        return None

    def duration_for(self, okta_profile):
        """ Gets requested duration from config, ignore it on failure """
        if self._value.has_option(okta_profile, 'duration'):
            duration = self._value.get(okta_profile, 'duration')
            self.logger.debug(
                "Requesting a duration of %s seconds" % duration
            )
            try:
                return int(duration)
            except ValueError:
                self.logger.warning(
                    "Duration could not be converted to a number,"
                    " ignoring."
                )
        return None

    def write_role_to_profile(self, okta_profile, role_arn):
        """ Saves role to profile in config """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)
            base_url = self.base_url_for(okta_profile)
            self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'role', role_arn)
        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)

    def write_applink_to_profile(self, okta_profile, app_link):
        """ Saves app link to profile in config """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)
            base_url = self.base_url_for(okta_profile)
            self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'app-link', app_link)
        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)

    @staticmethod
    def get_okta_profiles():
        """ List the profile section names found in ~/.okta-aws. """
        value = RawConfigParser()
        config_path = os.path.expanduser('~') + '/.okta-aws'
        value.read(config_path)
        return value.sections()
def import_eds(source, node_id):
    """
    Parse an EDS (Electronic Data Sheet) file into an ObjectDictionary.

    :param source: file path, or an already-open file-like object
                   (anything with a ``read`` attribute).
    :param node_id: node id used when expanding NodeID-dependent variables.
    :return: populated ``objectdictionary.ObjectDictionary``.

    Note: the file object is always closed afterwards, even when it was
    supplied by the caller (original behavior, preserved).
    """
    eds = RawConfigParser()
    if hasattr(source, "read"):
        fp = source
    else:
        fp = open(source)
    try:
        try:
            # Python 3
            eds.read_file(fp)
        except AttributeError:
            # Python 2
            eds.readfp(fp)
    finally:
        # Close even on parse errors so the handle never leaks.
        fp.close()

    od = objectdictionary.ObjectDictionary()
    if eds.has_section("DeviceComissioning"):
        od.bitrate = int(eds.get("DeviceComissioning", "Baudrate")) * 1000
        od.node_id = int(eds.get("DeviceComissioning", "NodeID"))

    for section in eds.sections():
        # Match dummy definitions
        match = re.match(r"^[Dd]ummy[Uu]sage$", section)
        if match is not None:
            for i in range(1, 8):
                key = "Dummy%04d" % i
                if eds.getint(section, key) == 1:
                    var = objectdictionary.Variable(key, i, 0)
                    var.data_type = i
                    var.access_type = "const"
                    od.add_object(var)

        # Match indexes
        match = re.match(r"^[0-9A-Fa-f]{4}$", section)
        if match is not None:
            index = int(section, 16)
            name = eds.get(section, "ParameterName")
            try:
                object_type = int(eds.get(section, "ObjectType"), 0)
            except NoOptionError:
                # DS306 4.6.3.2 object description
                # If the keyword ObjectType is missing, this is regarded as
                # "ObjectType=0x7" (=VAR).
                object_type = VAR
            if object_type in (VAR, DOMAIN):
                var = build_variable(eds, section, node_id, index)
                od.add_object(var)
            elif object_type == ARR and eds.has_option(
                    section, "CompactSubObj"):
                arr = objectdictionary.Array(name, index)
                last_subindex = objectdictionary.Variable(
                    "Number of entries", index, 0)
                last_subindex.data_type = objectdictionary.UNSIGNED8
                arr.add_member(last_subindex)
                arr.add_member(build_variable(eds, section, node_id, index, 1))
                od.add_object(arr)
            elif object_type == ARR:
                arr = objectdictionary.Array(name, index)
                od.add_object(arr)
            elif object_type == RECORD:
                record = objectdictionary.Record(name, index)
                od.add_object(record)
            continue

        # Match subindexes
        # BUGFIX: the class was written [S|s], which also matched a literal
        # '|' between the index and "ub"; [Ss] matches only 'S' or 's'.
        match = re.match(r"^([0-9A-Fa-f]{4})[Ss]ub([0-9A-Fa-f]+)$", section)
        if match is not None:
            index = int(match.group(1), 16)
            subindex = int(match.group(2), 16)
            entry = od[index]
            if isinstance(entry, (objectdictionary.Record,
                                  objectdictionary.Array)):
                var = build_variable(eds, section, node_id, index, subindex)
                entry.add_member(var)

        # Match [index]Name
        match = re.match(r"^([0-9A-Fa-f]{4})Name", section)
        if match is not None:
            index = int(match.group(1), 16)
            num_of_entries = int(eds.get(section, "NrOfEntries"))
            entry = od[index]
            # For CompactSubObj index 1 is were we find the variable
            src_var = od[index][1]
            for subindex in range(1, num_of_entries + 1):
                var = copy_variable(eds, section, subindex, src_var)
                if var is not None:
                    entry.add_member(var)

    return od
class Config():
    """Handle credentials configuration."""

    def __init__(self, cfg_file='~/.credentials'):
        """
        Create an instance of a dictionary-like configuration object.

        :param cfg_file: The path to the RawConfigParser compatible
                         config file
        """
        self._cfg = RawConfigParser()
        self._cfg.read(os.path.expanduser(cfg_file))
        self._cfg_file = cfg_file

    def __getitem__(self, section):
        """
        Get the named tuple representing the configuration held at `section`.

        Build a named tuple representing the configuration at `section`. If
        a config file does not have an option for the section ignore it.
        Resulting in an AttributeError if accessed later in the code.
        Environment variables named ``<SECTION>_<key>`` override file values.

        :param section: the section to retrieve
        """
        def _getattr_wrapper(t, attr):
            """
            Replace the standard __getattr__ functionality.

            In the case when a section and/or attribute is not set in the
            config file, the error shown will be more helpful.
            """
            try:
                return t.__getattribute__(attr)
            except AttributeError as exc:
                exc.args = (
                    (
                        f'Unable to locate attribute "{attr}" '
                        f'in section "{type(t).__name__}" '
                        f'at config file "{self._cfg_file}"'
                    ),
                )
                raise exc

        env_vars = [
            k for k in os.environ.keys()
            if k.startswith(f'{section.upper()}_')
        ]
        env_keys = [
            k.split(section.upper())[1].lstrip('_').lower() for k in env_vars
        ]
        env_values = [os.environ[e] for e in env_vars]
        if env_vars:
            logger.debug(
                f'Loading credentials from ENV vars: {", ".join(env_vars)}'
            )

        params = []
        if self._cfg.has_section(section):
            params = self._cfg.options(section)
        values = [self._cfg.get(section, x) for x in params]
        d = OrderedDict(zip(params, values))
        if env_vars:
            # ENV vars take precedence over file values.
            d.update(zip(env_keys, env_values))
        t = namedtuple(section, ' '.join(list(d.keys())))
        t.__getattr__ = _getattr_wrapper
        return t(*list(d.values()))

    def get(self, section, key=None, account=None, default=_sentinel):
        """
        Retrieve sections and keys by account.

        ``section``: the section from which to retrieve keys.
        ``key``: the key in the section whose value you want to retrieve.
            if not specified, returns the whole section as a dictionary.
        ``account``: if provided, fetches the value for the specific account.
            assumes the account is prefixed to the key and separated by _.
        ``default``: if provided, returns this value if a value cannot be
            found; otherwise raises an exception.
        """
        if key is None:
            return self[section]
        if account:
            key = '_'.join([account, key])
        # BUGFIX: sentinel objects must be compared by identity, not
        # equality — `==` could be fooled by a value with a custom __eq__.
        if default is _sentinel:
            return getattr(self[section], key)
        else:
            return getattr(self[section], key, default)
'password': '******', 'port': '5432', 'user': '******', }, 'redis': { 'host': 'localhost', 'port': '6379', 'password': '', }, 'sentry': { 'dsn': '', # Please ensure that Python Sentry SDK is installed. }, } for section, options in config_defaults.items(): if not config.has_section(section): config.add_section(section) for option, value in options.items(): if not config.has_option(section, option): config.set(section, option, value) if config.get('redis', 'password'): redis_url = 'redis://:%s@%s:%s' % (config.get('redis', 'password'), config.get('redis', 'host'), config.get('redis', 'port')) else: redis_url = 'redis://%s:%s' % (config.get('redis', 'host'), config.get('redis', 'port')) # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = config.get('global', 'secret_key')
class APIVersionWriter(TemplateFileWriter):
    """ Provide usefull method to write Objj files.

    """

    def __init__(self, monolithe_config, api_info):
        """ Initialize paths, prefixes and attribute-defaults from the
            transformer configuration.
        """
        super(APIVersionWriter, self).__init__(
            package="monolithe.generators.lang.objj")

        self.api_version = api_info["version"]
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        self.monolithe_config = monolithe_config
        self._output = self.monolithe_config.get_option(
            "output", "transformer")
        self._transformation_name = self.monolithe_config.get_option(
            "name", "transformer")
        self._class_prefix = self.monolithe_config.get_option(
            "class_prefix", "transformer")
        self._product_accronym = self.monolithe_config.get_option(
            "product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")

        self.output_directory = "%s/objj/" % (self._output)
        self.override_folder = os.path.normpath(
            "%s/../../__overrides" % self.output_directory)
        self.fetchers_path = "/Fetchers/"

        # Per-class attribute defaults, read from an ini file keyed by
        # "<prefix><EntityName>" sections. optionxform=str keeps option
        # names case-sensitive.
        self.attrs_defaults = RawConfigParser()
        path = "%s/objj/__attributes_defaults/attrs_defaults.ini" \
            % self._output
        self.attrs_defaults.optionxform = str
        self.attrs_defaults.read(path)

        with open("%s/objj/__code_header" % self._output, "r") as f:
            self.header_content = f.read()

    def perform(self, specifications):
        """ Generate a model and a fetcher for every specification,
            then the aggregating init files.
        """
        self.model_filenames = dict()
        self.fetcher_filenames = dict()

        task_manager = TaskManager()
        # Only the specification objects are needed, not their rest names.
        for specification in specifications.values():
            task_manager.start_task(
                method=self._write_model,
                specification=specification,
                specification_set=specifications)
            task_manager.start_task(
                method=self._write_fetcher,
                specification=specification,
                specification_set=specifications)

        task_manager.wait_until_exit()
        self._write_init_models(filenames=self.model_filenames)
        self._write_init_fetchers(filenames=self.fetcher_filenames)

    def _write_model(self, specification, specification_set):
        """ Render one ObjectModel .j file for a specification. """
        filename = "%s%s.j" % (self._class_prefix, specification.entity_name)

        override_content = self._extract_override_content(
            specification.entity_name)
        constants = self._extract_constants(specification)
        superclass_name = "NURESTAbstractRoot" \
            if specification.rest_name == self.api_root else "NURESTObject"

        defaults = {}
        section = "%s%s" % (self._class_prefix, specification.entity_name)
        if self.attrs_defaults.has_section(section):
            for attribute in self.attrs_defaults.options(section):
                defaults[attribute] = self.attrs_defaults.get(
                    section, attribute)

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="ObjectModel.j.tpl",
                   specification=specification,
                   specification_set=specification_set,
                   version=self.api_version,
                   class_prefix=self._class_prefix,
                   product_accronym=self._product_accronym,
                   override_content=override_content,
                   superclass_name=superclass_name,
                   constants=constants,
                   header=self.header_content,
                   attribute_defaults=defaults)

        self.model_filenames[filename] = specification.entity_name

    def _write_init_models(self, filenames):
        """ Render Models.j importing every generated model. """
        filename = "Models.j"

        ordered = OrderedDict(sorted(filenames.items()))

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="Models.j.tpl",
                   filenames=ordered,
                   class_prefix=self._class_prefix,
                   header=self.header_content)

    def _write_fetcher(self, specification, specification_set):
        """ Render one ObjectFetcher .j file for a specification. """
        destination = "%s/%s" % (self.output_directory, self.fetchers_path)
        base_name = "%sFetcher" % specification.entity_name_plural
        filename = "%s%s.j" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)

        self.write(destination=destination,
                   filename=filename,
                   template_name="ObjectFetcher.j.tpl",
                   specification=specification,
                   class_prefix=self._class_prefix,
                   override_content=override_content,
                   header=self.header_content)

        self.fetcher_filenames[filename] = specification.entity_name_plural

    def _write_init_fetchers(self, filenames):
        """ Render Fetchers.j importing every generated fetcher. """
        filename = "Fetchers/Fetchers.j"

        ordered = OrderedDict(sorted(filenames.items()))

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="Fetchers.j.tpl",
                   filenames=ordered,
                   class_prefix=self._class_prefix,
                   header=self.header_content)

    def _extract_override_content(self, name):
        """ Return the override file content for `name`, or None.

            A version-specific override wins over the generic one.
        """
        # find override file
        specific_override_path = "%s/%s_%s%s.override.py" % (
            self.override_folder,
            self.api_version,
            self._class_prefix.lower(),
            name.lower())
        generic_override_path = "%s/%s%s.override.py" % (
            self.override_folder,
            self._class_prefix.lower(),
            name.lower())
        final_path = specific_override_path if os.path.exists(
            specific_override_path) else generic_override_path

        # Read override from file
        override_content = None
        if os.path.isfile(final_path):
            # BUGFIX: the file handle was previously left open; use a
            # context manager so it is always closed.
            with open(final_path) as f:
                override_content = f.read()

        return override_content

    def _extract_constants(self, specification):
        """ Removes attributes and computes constants

        """
        constants = {}

        for attribute in specification.attributes:
            if attribute.allowed_choices and len(
                    attribute.allowed_choices) > 0:
                name = attribute.local_name
                name = name[:1].upper() + name[1:]

                for choice in attribute.allowed_choices:
                    constants["%s%s%s_%s" % (
                        self._class_prefix,
                        specification.entity_name,
                        name,
                        choice.upper())] = choice

        return constants
def main():
    """Entry point for the alerta-mailer daemon.

    Loads configuration from a single ini file or a ``<file>.d`` directory
    of snippets, applies environment-variable overrides, starts the
    MailSender worker and then consumes alerts over AMQP until interrupted.
    Exits the process on fatal errors.

    NOTE(review): DEFAULT_OPTIONS, OPTIONS, LOG, parse_group_rules,
    on_sigterm, MailSender, Connection and FanoutConsumer are module-level
    names defined outside this excerpt.
    """
    global OPTIONS

    CONFIG_SECTION = 'alerta-mailer'
    config_file = os.environ.get('ALERTA_CONF_FILE') or DEFAULT_OPTIONS['config_file']  # nopep8

    # Convert default booleans to its string type, otherwise config.getboolean fails  # nopep8
    defopts = {k: str(v) if type(v) is bool else v for k, v in DEFAULT_OPTIONS.items()}  # nopep8
    config = RawConfigParser(defaults=defopts)

    # a "<config_file>.d" directory turns the config into a list of
    # snippet files plus the main file itself
    if os.path.exists("{}.d".format(config_file)):
        config_path = "{}.d".format(config_file)
        config_list = []
        for files in os.walk(config_path):
            for filename in files[2]:
                config_list.append("{}/{}".format(config_path, filename))
        config_list.append(os.path.expanduser(config_file))
        config_file = config_list

    try:
        # No need to expanduser if we got a list (already done sooner)
        # Morever expanduser does not accept a list.
        if isinstance(config_file, list):
            config.read(config_file)
        else:
            config.read(os.path.expanduser(config_file))
    except Exception as e:
        LOG.warning("Problem reading configuration file %s - is this an ini file?", config_file)  # nopep8
        sys.exit(1)

    if config.has_section(CONFIG_SECTION):
        # dispatch on the *default's* Python type to pick the right getter
        NoneType = type(None)
        config_getters = {
            NoneType: config.get,
            str: config.get,
            int: config.getint,
            float: config.getfloat,
            bool: config.getboolean,
            list: lambda s, o: [e.strip() for e in config.get(s, o).split(',')] if len(config.get(s, o)) else []
        }
        for opt in DEFAULT_OPTIONS:
            # Convert the options to the expected type
            OPTIONS[opt] = config_getters[type(DEFAULT_OPTIONS[opt])](CONFIG_SECTION, opt)  # nopep8
    else:
        sys.stderr.write('Alerta configuration section not found in configuration file\n')  # nopep8
        OPTIONS = defopts.copy()

    # environment variables override file configuration
    OPTIONS['endpoint'] = os.environ.get('ALERTA_ENDPOINT') or OPTIONS['endpoint']  # nopep8
    OPTIONS['key'] = os.environ.get('ALERTA_API_KEY') or OPTIONS['key']
    OPTIONS['smtp_username'] = os.environ.get('SMTP_USERNAME') or OPTIONS['smtp_username'] or OPTIONS['mail_from']
    OPTIONS['smtp_password'] = os.environ.get('SMTP_PASSWORD') or OPTIONS['smtp_password']  # nopep8
    if os.environ.get('DEBUG'):
        OPTIONS['debug'] = True

    # group rules come from every config file that was read
    if isinstance(config_file, list):
        group_rules = []
        for file in config_file:
            group_rules.extend(parse_group_rules(file))
    else:
        group_rules = parse_group_rules(config_file)
    if group_rules is not None:
        OPTIONS['group_rules'] = group_rules

    # Registering action for SIGTERM signal handling
    signal.signal(signal.SIGTERM, on_sigterm)

    try:
        mailer = MailSender()
        mailer.start()
    except (SystemExit, KeyboardInterrupt):
        sys.exit(0)
    except Exception as e:
        print(str(e))
        sys.exit(1)

    from kombu.utils.debug import setup_logging
    loginfo = 'DEBUG' if OPTIONS['debug'] else 'INFO'
    setup_logging(loglevel=loginfo, loggers=[''])

    # blocking consume loop; a clean shutdown also stops the mailer thread
    with Connection(OPTIONS['amqp_url']) as conn:
        try:
            consumer = FanoutConsumer(connection=conn)
            consumer.run()
        except (SystemExit, KeyboardInterrupt):
            mailer.should_stop = True
            mailer.join()
            sys.exit(0)
        except Exception as e:
            print(str(e))
            sys.exit(1)
class OktaAuthConfig():
    """ Config helper class

    Reads per-profile settings from ~/.okta-aws, falling back to the
    'default' profile where a named profile lacks an option.
    """

    def __init__(self, logger):
        self.logger = logger
        self.config_path = os.path.expanduser('~') + '/.okta-aws'
        self._value = RawConfigParser()
        self._value.read(self.config_path)

    def base_url_for(self, okta_profile):
        """ Gets base URL from config """
        if self._value.has_option(okta_profile, 'base-url'):
            base_url = self._value.get(okta_profile, 'base-url')
            self.logger.info("Authenticating to: %s" % base_url)
        elif self._value.has_option('default', 'base-url'):
            base_url = self._value.get('default', 'base-url')
            self.logger.info("Using base-url from default profile %s" % base_url)
        else:
            # no usable profile at all: abort
            self.logger.error(
                "No profile found. Please define a default profile, or specify a named profile using `--okta-profile`"
            )
            sys.exit(1)
        return base_url

    def app_link_for(self, okta_profile):
        """ Gets app_link from config """
        app_link = None
        if self._value.has_option(okta_profile, 'app-link'):
            app_link = self._value.get(okta_profile, 'app-link')
        elif self._value.has_option('default', 'app-link'):
            app_link = self._value.get('default', 'app-link')
        try:
            # validators.url raises TypeError when app_link is None
            if not validators.url(app_link):
                self.logger.error(
                    "The app-link provided: %s is an invalid url" % app_link)
                sys.exit(-1)
        except TypeError as ex:
            self.logger.error(
                "Malformed string in app link URL. Ensure there are no invalid characters."
            )
        self.logger.info("App Link set as: %s" % app_link)
        return app_link

    def username_for(self, okta_profile):
        """ Gets username from config """
        if self._value.has_option(okta_profile, 'username'):
            username = self._value.get(okta_profile, 'username')
            self.logger.info("Authenticating as: %s" % username)
        else:
            # NOTE(review): the source text below is corrupted — "******"
            # spans (apparently credential-scrubbing) replaced real code,
            # which appears to have included the end of this method plus
            # password_for() and factor_for() lookups.  Preserved verbatim;
            # restore from the upstream project before use.
            username = input('Enter username: '******'password'): password = self._value.get(okta_profile, 'password') else: password = getpass('Enter password: '******'factor'): factor = self._value.get(okta_profile, 'factor') self.logger.debug("Setting MFA factor to %s" % factor) return factor return None

    def duration_for(self, okta_profile):
        """ Gets requested duration from config, ignore it on failure """
        if self._value.has_option(okta_profile, 'duration'):
            duration = self._value.get(okta_profile, 'duration')
            self.logger.debug("Requesting a duration of %s seconds" % duration)
            try:
                return int(duration)
            except ValueError:
                self.logger.warn("Duration could not be converted to a number,"
                                 " ignoring.")
        return None

    def write_role_to_profile(self, okta_profile, role_arn):
        """ Saves role to profile in config """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)
        # re-resolve base-url so a newly created section gets one too
        base_url = self.base_url_for(okta_profile)
        self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'role', role_arn)
        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)

    def write_applink_to_profile(self, okta_profile, app_link):
        """ Saves app link to profile in config """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)
        base_url = self.base_url_for(okta_profile)
        self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'app-link', app_link)
        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)
class BaseConfigStore(object):
    """Persistent key/value store backed by an ini file.

    Values are serialised with pprint and parsed back with
    ast.literal_eval, so plain Python literals round-trip unchanged.
    """

    def __init__(self, name, *arg, **kw):
        super(BaseConfigStore, self).__init__(*arg, **kw)
        self.dirty = False
        self.config = RawConfigParser()
        # config directory: $PHOTINI_CONFIG wins, otherwise the appdirs
        # per-user config (or, failing that, data) directory
        base_dir = os.environ.get('PHOTINI_CONFIG')
        if base_dir:
            base_dir = os.path.expanduser(base_dir)
        elif hasattr(appdirs, 'user_config_dir'):
            base_dir = appdirs.user_config_dir('photini')
        else:
            base_dir = appdirs.user_data_dir('photini')
        if not os.path.isdir(base_dir):
            os.makedirs(base_dir, mode=stat.S_IRWXU)
        self.file_name = os.path.join(base_dir, name + '.ini')
        if os.path.isfile(self.file_name):
            with open(self.file_name, 'r', encoding='utf-8') as src:
                self.config.read_file(src)
        self.has_section = self.config.has_section

    def get(self, section, option, default=None):
        """Return the stored value, or *default* (storing it) when absent."""
        if not self.config.has_option(section, option):
            # remember non-None defaults so they land in the file later
            if default is not None:
                self.set(section, option, default)
            return default
        raw = self.config.get(section, option)
        if not raw:
            return None
        try:
            return ast.literal_eval(raw)
        except Exception:
            return raw

    def set(self, section, option, value):
        """Store *value* as its pprint representation; no-op if unchanged."""
        text = pprint.pformat(value)
        if self.config.has_section(section):
            if (self.config.has_option(section, option)
                    and self.config.get(section, option) == text):
                return
        else:
            self.config.add_section(section)
        self.config.set(section, option, text)
        self.dirty = True

    def delete(self, section, option):
        """Drop one option; drop the whole section once it is empty."""
        if not self.config.has_section(section):
            return
        if self.config.has_option(section, option):
            self.config.remove_option(section, option)
        if not self.config.options(section):
            self.config.remove_section(section)
        self.dirty = True

    def remove_section(self, section):
        """Drop a section and everything inside it."""
        if not self.config.has_section(section):
            return
        for option in self.config.options(section):
            self.config.remove_option(section, option)
        self.config.remove_section(section)
        self.dirty = True

    def save(self):
        """Write pending changes to disk with owner-only permissions."""
        if not self.dirty:
            return
        with open(self.file_name, 'w', encoding='utf-8') as dst:
            self.config.write(dst)
        os.chmod(self.file_name, stat.S_IRUSR | stat.S_IWUSR)
        self.dirty = False
import json from zipfile import ZipFile from flask import Flask, jsonify, render_template, request, send_file from flask.views import MethodView from werkzeug.contrib.fixers import ProxyFix from configparser import RawConfigParser app = Flask(__name__) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Application definition config = RawConfigParser() config.read(os.path.join(BASE_DIR, 'settings.ini')) if config.has_section('defaults'): if config.has_option(section='defaults', option='FLASK_RUN_DIRECTORY'): FLASK_RUN_DIRECTORY = os.path.expanduser( config.get('defaults', 'FLASK_RUN_DIRECTORY')) if config.has_option(section='defaults', option='DREAM3D_DIRECTORY'): DREAM3D_DIRECTORY = os.path.expanduser( config.get('defaults', 'DREAM3D_DIRECTORY')) if config.has_option(section='defaults', option='TEMPORARY_PIPELINE_FILE_LOCATION'): TEMPORARY_PIPELINE_FILE_LOCATION = os.path.expanduser( config.get('defaults', 'TEMPORARY_PIPELINE_FILE_LOCATION')) class PipelineRunnerAPI(MethodView): """ @Creator: James Fourman
class Config(object):
    """A wrapper around RawConfigParser.

    Provides a ``defaults`` attribute of the same type which can be used
    to set default values.

    NOTE(review): relies on helpers defined outside this excerpt:
    ``_sorted_dict``, ``print_d``, ``Error``, ``_DEFAULT``, ``list_unique``,
    ``join_escape``, ``split_escape``, ``fsnative``, ``mkdir``,
    ``atomic_save`` — confirm their contracts in the surrounding module.
    """

    def __init__(self, version=None, _defaults=True):
        """Use read() to read in an existing config file.

        version should be an int starting with 0 that gets incremented if you
        want to register a new upgrade function. If None, upgrade is disabled.
        """

        self._config = ConfigParser(dict_type=_sorted_dict)
        self.defaults = None
        if _defaults:
            # the defaults layer is itself a Config (without its own defaults,
            # which would recurse forever)
            self.defaults = Config(_defaults=False)
        self._version = version
        self._loaded_version = None
        self._upgrade_funcs = []

    def _do_upgrade(self, func):
        # run one registered upgrade function if the versions differ
        assert self._loaded_version is not None
        assert self._version is not None

        old_version = self._loaded_version
        new_version = self._version
        if old_version != new_version:
            print_d("Config upgrade: %d->%d (%r)" % (
                old_version, new_version, func))
            func(self, old_version, new_version)

    def get_version(self):
        """Get the version of the loaded config file (for testing only)

        Raises Error if no file was loaded or versioning is disabled.
        """

        if self._version is None:
            raise Error("Versioning disabled")

        if self._loaded_version is None:
            raise Error("No file loaded")

        return self._loaded_version

    def register_upgrade_function(self, function):
        """Register an upgrade function that gets called at each read()
        if the current config version and the loaded version don't match.

        Can also be registered after read was called.

        function(config, old_version: int, new_version: int) -> None
        """

        if self._version is None:
            raise Error("Versioning disabled")

        self._upgrade_funcs.append(function)
        # after read(), so upgrade now
        if self._loaded_version is not None:
            self._do_upgrade(function)
        return function

    def reset(self, section, option):
        """Reset the value to the default state"""

        assert self.defaults is not None
        try:
            self._config.remove_option(section, option)
        except NoSectionError:
            pass

    def options(self, section):
        """Returns a list of options available in the specified section."""

        try:
            options = self._config.options(section)
        except NoSectionError:
            if self.defaults:
                return self.defaults.options(section)
            raise
        else:
            # merge in default-layer options, de-duplicated
            if self.defaults:
                try:
                    options.extend(self.defaults.options(section))
                    options = list_unique(options)
                except NoSectionError:
                    pass
            return options

    def get(self, section, option, default=_DEFAULT):
        """get(section, option[, default]) -> str

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.get(section, option)
        except Error:
            if default is _DEFAULT:
                # fall back to the defaults layer before giving up
                if self.defaults is not None:
                    try:
                        return self.defaults.get(section, option)
                    except Error:
                        pass
                raise
            return default

    def gettext(self, *args, **kwargs):
        value = self.get(*args, **kwargs)
        # make sure there are no surrogates
        value.encode("utf-8")
        return value

    def getbytes(self, section, option, default=_DEFAULT):
        try:
            value = self._config.get(section, option)
            value = value.encode("utf-8", "surrogateescape")
            return value
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getbytes(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getboolean(self, section, option, default=_DEFAULT):
        """getboolean(section, option[, default]) -> bool

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.getboolean(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getboolean(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getint(self, section, option, default=_DEFAULT):
        """getint(section, option[, default]) -> int

        If default is not given or set, raises Error in case of an error
        """

        try:
            # int() of getfloat so "1.5"-style values are accepted (truncated)
            return int(self._config.getfloat(section, option))
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getint(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getfloat(self, section, option, default=_DEFAULT):
        """getfloat(section, option[, default]) -> float

        If default is not given or set, raises Error in case of an error
        """

        try:
            return self._config.getfloat(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getfloat(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getstringlist(self, section, option, default=_DEFAULT):
        """getstringlist(section, option[, default]) -> list

        If default is not given or set, raises Error in case of an error.
        Gets a list of strings, using CSV to parse and delimit.
        """

        try:
            value = self._config.get(section, option)
            parser = csv.reader(
                [value], lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
            try:
                vals = next(parser)
            except (csv.Error, ValueError) as e:
                raise Error(e)
            return vals
        except Error as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getstringlist(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def setstringlist(self, section, option, values):
        """Saves a list of unicode strings using the csv module"""

        sw = StringIO()
        values = [str(v) for v in values]
        writer = csv.writer(sw, lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
        writer.writerow(values)
        self.set(section, option, sw.getvalue())

    def setlist(self, section, option, values, sep=","):
        """Saves a list of str using ',' as a separator and \\ for escaping"""

        values = [str(v) for v in values]
        joined = join_escape(values, sep)
        self.set(section, option, joined)

    def getlist(self, section, option, default=_DEFAULT, sep=","):
        """Returns a str list saved with setlist()"""

        try:
            value = self._config.get(section, option)
            return split_escape(value, sep)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getlist(section, option, sep=sep)
                    except Error:
                        pass
                raise Error(e)
            return default

    def set(self, section, option, value):
        """Saves the string representation for the passed value

        Don't pass unicode, encode first.
        """

        if isinstance(value, bytes):
            raise TypeError("use setbytes")

        # RawConfigParser only allows string values but doesn't
        # scream if they are not (and it only fails before the
        # first config save..)
        if not isinstance(value, str):
            value = str(value)

        try:
            self._config.set(section, option, value)
        except NoSectionError:
            # auto-create the section only if the defaults layer knows it
            if self.defaults and self.defaults.has_section(section):
                self._config.add_section(section)
                self._config.set(section, option, value)
            else:
                raise

    def settext(self, section, option, value):
        value = str(value)
        # make sure there are no surrogates
        value.encode("utf-8")
        self.set(section, option, value)

    def setbytes(self, section, option, value):
        assert isinstance(value, bytes)
        value = value.decode("utf-8", "surrogateescape")
        self.set(section, option, value)

    def write(self, filename):
        """Write config to filename.

        Can raise EnvironmentError
        """

        assert isinstance(filename, fsnative)

        mkdir(os.path.dirname(filename))

        # temporary set the new version for saving
        # NOTE(review): when no file was ever loaded (_loaded_version is
        # None) the "__config__" section added here is left in place.
        if self._version is not None:
            self.add_section("__config__")
            self.set("__config__", "version", self._version)
        try:
            with atomic_save(filename, "wb") as fileobj:
                temp = StringIO()
                self._config.write(temp)
                data = temp.getvalue().encode("utf-8", "surrogateescape")
                fileobj.write(data)
        finally:
            if self._loaded_version is not None:
                self.set("__config__", "version", self._loaded_version)

    def clear(self):
        """Remove all sections."""

        for section in self._config.sections():
            self._config.remove_section(section)

    def is_empty(self):
        """Whether the config has any sections"""

        return not self._config.sections()

    def read(self, filename):
        """Reads the config from `filename` if the file exists,
        otherwise does nothing

        Can raise EnvironmentError, Error.
        """

        try:
            with open(filename, "rb") as fileobj:
                fileobj = StringIO(
                    fileobj.read().decode("utf-8", "surrogateescape"))
                self._config.readfp(fileobj, filename)
        except (IOError, OSError):
            return

        # don't upgrade if we just created a new config
        if self._version is not None:
            self._loaded_version = self.getint("__config__", "version", -1)
            for func in self._upgrade_funcs:
                self._do_upgrade(func)

    def has_option(self, section, option):
        """If the given section exists, and contains the given option"""

        return self._config.has_option(section, option) or (
            self.defaults and self.defaults.has_option(section, option))

    def has_section(self, section):
        """If the given section exists"""

        return self._config.has_section(section) or (
            self.defaults and self.defaults.has_section(section))

    def remove_option(self, section, option):
        """Remove the specified option from the specified section

        Can raise Error.
        """

        return self._config.remove_option(section, option)

    def add_section(self, section):
        """Add a section named section to the instance if it not already
        exists."""

        if not self._config.has_section(section):
            self._config.add_section(section)
config_name = sys.argv[1] config = RawConfigParser(options) config.read("grid_search.cfg") def parse_range(key): value = options[key] parsed_value = ast.literal_eval(value) if isinstance(parsed_value, list): options[key] = parsed_value else: options[key] = [parsed_value] # overwrite default values for options if config.has_section(config_name): for option_name in options.keys(): options[option_name] = config.get(config_name, option_name) parse_range('epochs') # Set up the input input_data = pickle.load(open(options['training_file'], 'rb'), encoding='latin1') rectangles = np.array(list(map(lambda x: x[0], input_data['data'])), dtype=np.float32) rectangles = rectangles.reshape((-1, 28, 28)) dimension_names = input_data['dimensions'] length_of_data_set = len(rectangles) labels = np.array(list(map(lambda x: x[1:], input_data['data'])), dtype=np.float32)
matcher = re.match(r"""^__version__\s*=\s*['"](.*)['"]\s*$""", line) version = version or matcher and matcher.group(1) with codecs.open( os.path.join(os.path.dirname(__file__), "README.md"), encoding="utf-8" ) as fd: long_description = fd.read() sources, backup_points, collect_points, filters, hooks = [], [], [], [], [] engines_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "engines.ini") if os.path.isfile(engines_file): parser = RawConfigParser() parser.read([engines_file]) if parser.has_section("sources"): sources = ["%s = %s" % (key, value) for key, value in parser.items("sources")] if parser.has_section("backup_points"): backup_points = [ "%s = %s" % (key, value) for key, value in parser.items("backup_points") ] if parser.has_section("collect_points"): collect_points = [ "%s = %s" % (key, value) for key, value in parser.items("collect_points") ] if parser.has_section("filters"): filters = ["%s = %s" % (key, value) for key, value in parser.items("filters")] if parser.has_section("hooks"): hooks = ["%s = %s" % (key, value) for key, value in parser.items("hooks")] command_suffix = "3" if sys.version_info[0] == 3 else ""
class APIVersionWriter(TemplateFileWriter):
    """Writes the Go SDK sources for one API version.

    Emits the version-info file, the session file and one model file per
    specification, then runs ``gofmt`` over the generated tree.
    """

    def __init__(self, monolithe_config, api_info):
        """ Initializes a _GoSDKAPIVersionFileWriter """
        super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.go")

        self.monolithe_config = monolithe_config
        self.api_version = api_info["version"]
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        cfg = self.monolithe_config
        self._output = cfg.get_option("output", "transformer")
        self._transformation_name = cfg.get_option("name", "transformer")
        self._product_accronym = cfg.get_option("product_accronym")
        self._product_name = cfg.get_option("product_name")

        self.output_directory = "%s/go/%s" % (self._output, self._transformation_name)

        # per-entity attribute defaults; optionxform = str keeps option
        # names case-sensitive
        self.attrs_defaults = RawConfigParser()
        defaults_path = "%s/go/__attributes_defaults/attrs_defaults.ini" % self._output
        self.attrs_defaults.optionxform = str
        self.attrs_defaults.read(defaults_path)

        with open("%s/go/__code_header" % self._output, "r") as header_file:
            self.header_content = header_file.read()

    def perform(self, specifications):
        """Generates every Go source file for the given specifications."""
        self._write_info()
        self._write_session()

        tasks = TaskManager()
        for _, spec in specifications.items():
            tasks.start_task(method=self._write_model,
                             specification=spec,
                             specification_set=specifications)
        tasks.wait_until_exit()

        self._format()

    def _write_info(self):
        """Renders sdkinfo.go from its template."""
        self.write(destination=self.output_directory,
                   filename="sdkinfo.go",
                   template_name="sdkinfo.go.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   product_name=self._product_name,
                   name=self._transformation_name,
                   header=self.header_content)

    def _write_session(self):
        """Renders session.go from its template."""
        self.write(destination=self.output_directory,
                   filename="session.go",
                   template_name="session.go.tpl",
                   version=self.api_version,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   name=self._transformation_name,
                   header=self.header_content)

    def _write_model(self, specification, specification_set):
        """Renders one model file; returns (filename, entity name)."""
        filename = "%s.go" % specification.entity_name.lower()

        section = specification.entity_name
        if self.attrs_defaults.has_section(section):
            defaults = {option: self.attrs_defaults.get(section, option)
                        for option in self.attrs_defaults.options(section)}
        else:
            defaults = {}

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="model.go.tpl",
                   specification=specification,
                   specification_set=specification_set,
                   name=self._transformation_name,
                   header=self.header_content,
                   attribute_defaults=defaults)

        return (filename, specification.entity_name)

    def _format(self):
        """Runs gofmt over the output tree, discarding its output."""
        os.system("gofmt -w '%s' >/dev/null 2>&1" % self.output_directory)
class CommonVariables:
    """Typed access to the taskmgr variables.ini settings file.

    Sections: DEFAULT (global), "task" and "database".  Reads always hit
    the file; writes save immediately.  Unknown sections fall back to
    DEFAULT on read.
    """

    def __init__(self, ini_file_name=None):
        self.task_section = "task"
        self.database_section = "database"
        self.default_section = "DEFAULT"
        if ini_file_name is None:
            self.ini_file = "variables.ini"
        else:
            self.ini_file = ini_file_name
        self.cfg = RawConfigParser()
        self.create_file()

    def create_file(self):
        """Creates the ini file with defaults if it does not exist yet."""
        if not Path(self.__get_file_path()).exists():
            # __setitem__ goes through read_dict, which stringifies values
            self.cfg['DEFAULT'] = {'recurring_month_limit': 2,
                                   'default_date_expression': 'today',
                                   'default_text_field_length': 50,
                                   'date_format': '%Y-%m-%d',
                                   'date_time_format': '%Y-%m-%d %H:%M:%S',
                                   'time_format': '%H:%M:%S',
                                   'rfc3339_date_time_format': '%Y-%m-%dT%H:%M:%S.%fZ',
                                   'file_name_timestamp': '%Y%m%d_%H%M%S',
                                   'default_project_name': '',
                                   'default_label': '',
                                   'default_text': '',
                                   'enable_redis': False,
                                   'redis_host': 'localhost',
                                   'redis_port': 6379,
                                   'max_snapshot_rows': 10}
            os.makedirs(self.resources_dir, exist_ok=True)
            self.__save()

    def __get_file_path(self):
        return f"{self.resources_dir}/{self.ini_file}"

    def __read_file(self):
        path = self.__get_file_path()
        with open(path, 'r') as configfile:
            self.cfg.read_file(configfile)

    def __get(self, key, section):
        """Reads a string value, falling back to the DEFAULT section."""
        self.__read_file()
        try:
            return self.cfg.get(section, key)
        except NoSectionError:
            return self.cfg.get(self.default_section, key)

    def __getint(self, key, section):
        """Reads an int value, falling back to the DEFAULT section."""
        self.__read_file()
        try:
            return self.cfg.getint(section, key)
        except NoSectionError:
            return self.cfg.getint(self.default_section, key)

    def __set(self, key, value, section):
        """Writes one value and saves the file immediately."""
        self.__read_file()
        # BUG FIX: was `section is not self.default_section` — identity
        # comparison on strings is fragile; use equality.
        if section != self.default_section and not self.cfg.has_section(section):
            self.cfg.add_section(section)
        # BUG FIX: RawConfigParser.set() requires string values; callers
        # pass ints (e.g. max_snapshot_rows, redis_port), which raised
        # TypeError before.
        self.cfg.set(section, key, str(value))
        self.__save()

    def __save(self):
        path = self.__get_file_path()
        with open(path, 'w') as configfile:
            self.cfg.write(configfile)

    def reset(self):
        """Clears the parser and truncates the file to empty."""
        # NOTE(review): clear() on a parser holding DEFAULT entries may
        # refuse to drop the default section — verify intended behavior.
        self.cfg.clear()
        path = self.__get_file_path()
        with open(path, 'w') as configfile:
            self.cfg.write(configfile)

    @property
    def log_dir(self):
        return f"{Path.home()}/.config/taskmgr/log/"

    @property
    def credentials_dir(self):
        return f"{Path.home()}/.config/taskmgr/credentials/"

    @property
    def resources_dir(self):
        return f"{Path.home()}/.config/taskmgr/resources/"

    @property
    def date_format(self):
        return self.__get("date_format", self.default_section)

    @staticmethod
    def validate_date_format(date_string: str) -> bool:
        """True when date_string looks like YYYY-MM-DD."""
        return re.match(r'^\d{4}-\d{2}-\d{2}$', date_string) is not None

    @property
    def date_time_format(self):
        return self.__get("date_time_format", self.default_section)

    @property
    def time_format(self):
        return self.__get("time_format", self.default_section)

    @property
    def rfc3339_date_time_format(self):
        return self.__get("rfc3339_date_time_format", self.default_section)

    @property
    def file_name_timestamp(self):
        return self.__get("file_name_timestamp", self.default_section)

    @property
    def max_snapshot_rows(self):
        return self.__getint("max_snapshot_rows", self.default_section)

    @max_snapshot_rows.setter
    def max_snapshot_rows(self, value):
        if value is not None:
            self.__set("max_snapshot_rows", int(value), self.default_section)

    @property
    def default_text(self):
        return self.__get("default_text", self.task_section)

    @property
    def default_label(self):
        return self.__get("default_label", self.task_section)

    @default_label.setter
    def default_label(self, value):
        if value is not None:
            self.__set("default_label", value, self.task_section)

    @property
    def recurring_month_limit(self):
        return self.__getint("recurring_month_limit", self.task_section)

    @recurring_month_limit.setter
    def recurring_month_limit(self, value):
        if value is not None:
            self.__set("recurring_month_limit", value, self.task_section)

    @property
    def default_date_expression(self):
        return self.__get("default_date_expression", self.task_section)

    @property
    def default_project_name(self):
        return self.__get("default_project_name", self.task_section)

    @default_project_name.setter
    def default_project_name(self, value):
        if value is not None:
            self.__set("default_project_name", value, self.task_section)

    @property
    def default_text_field_length(self):
        return self.__getint("default_text_field_length", self.task_section)

    @default_text_field_length.setter
    def default_text_field_length(self, value):
        if value is not None:
            self.__set("default_text_field_length", str(value), self.task_section)

    @property
    def enable_redis(self):
        # stored as "True"/"False" strings; literal_eval restores the bool
        return ast.literal_eval(self.__get("enable_redis", self.database_section))

    @enable_redis.setter
    def enable_redis(self, value):
        if value is not None:
            self.__set("enable_redis", str(value), self.database_section)

    @property
    def redis_host(self):
        return self.__get("redis_host", self.database_section)

    @redis_host.setter
    def redis_host(self, value):
        if value is not None:
            self.__set("redis_host", str(value), self.database_section)

    @property
    def redis_port(self):
        return self.__getint("redis_port", self.database_section)

    @redis_port.setter
    def redis_port(self, value):
        if value is not None:
            self.__set("redis_port", int(value), self.database_section)

    def __iter__(self):
        yield 'default_text_field_length', self.default_text_field_length
        yield 'default_project_name', self.default_project_name
        yield 'default_label', self.default_label
        yield 'recurring_month_limit', self.recurring_month_limit
        yield 'default_date_expression', self.default_date_expression
        yield 'enable_redis', self.enable_redis
        yield 'redis_host', self.redis_host
        yield 'redis_port', self.redis_port
        yield 'max_snapshot_rows', self.max_snapshot_rows
def parse_config(config_file):
    """Read *config_file* and populate the module-level sync settings.

    Returns True when every required option is present and each project
    listed in ``sync_projects`` is fully described; False otherwise.
    All parsed settings are stored in module globals.

    :config_file: config file name
    """
    global trello_api_key, trello_api_secret, trello_token, trello_token_secret
    global taskwarrior_taskrc_location, taskwarrior_data_location
    global sync_projects

    sync_projects = []
    conf = RawConfigParser()
    try:
        conf.read(config_file)
    except Exception:
        return False

    required = ('trello_api_key', 'trello_api_secret', 'trello_token',
                'trello_token_secret', 'sync_projects')
    if not all(conf.has_option('DEFAULT', name) for name in required):
        return False

    for project_name in conf.get('DEFAULT', 'sync_projects').split():
        # every listed project needs its own section with the two
        # mandatory options; anything missing aborts the whole parse
        if not conf.has_section(project_name):
            return False
        if not (conf.has_option(project_name, 'tw_project_name')
                and conf.has_option(project_name, 'trello_board_name')):
            return False

        project = {
            'project_name': project_name,
            'tw_project_name': conf.get(project_name, 'tw_project_name'),
            'trello_board_name': conf.get(project_name, 'trello_board_name'),
        }
        # optional list names fall back to Trello's conventional defaults
        for option, fallback in (('trello_todo_list', 'To Do'),
                                 ('trello_doing_list', 'Doing'),
                                 ('trello_done_list', 'Done')):
            if conf.has_option(project_name, option):
                project[option] = conf.get(project_name, option)
            else:
                project[option] = fallback
        sync_projects.append(project)

    trello_api_key = conf.get('DEFAULT', 'trello_api_key')
    trello_api_secret = conf.get('DEFAULT', 'trello_api_secret')
    trello_token = conf.get('DEFAULT', 'trello_token')
    trello_token_secret = conf.get('DEFAULT', 'trello_token_secret')

    if conf.has_option('DEFAULT', 'taskwarrior_taskrc_location'):
        taskwarrior_taskrc_location = conf.get('DEFAULT', 'taskwarrior_taskrc_location')
    else:
        taskwarrior_taskrc_location = '~/.taskrc'
    if conf.has_option('DEFAULT', 'taskwarrior_data_location'):
        taskwarrior_data_location = conf.get('DEFAULT', 'taskwarrior_data_location')
    else:
        taskwarrior_data_location = '~/.task'

    return True
class APIVersionWriter(TemplateFileWriter):
    """Writer that renders the Go SDK source files for one API version.

    Templates are loaded from the ``monolithe.generators.lang.go`` package
    and output is written under ``<output>/go/<transformation name>``.
    """

    def __init__(self, monolithe_config, api_info):
        """Initialize the writer from the transformer configuration.

        monolithe_config: configuration object queried via ``get_option``.
        api_info: dict with "version", "root" and "prefix" keys.
        """
        super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.go")

        self.monolithe_config = monolithe_config
        self.api_version = api_info["version"]
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        self._output = self.monolithe_config.get_option("output", "transformer")
        self._transformation_name = self.monolithe_config.get_option("name", "transformer")
        self._product_accronym = self.monolithe_config.get_option("product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")
        self.output_directory = "%s/go/%s" % (self._output, self._transformation_name)

        # Optional per-entity attribute defaults kept in an INI file.
        # optionxform=str keeps attribute names case-sensitive, which the
        # default (lower-casing) transform would break.
        self.attrs_defaults = RawConfigParser()
        path = "%s/go/__attributes_defaults/attrs_defaults.ini" % self._output
        self.attrs_defaults.optionxform = str
        self.attrs_defaults.read(path)

        # Header text (e.g. license banner) prepended to every generated file.
        with open("%s/go/__code_header" % self._output, "r") as f:
            self.header_content = f.read()

    def perform(self, specifications):
        """Render all SDK files for the given specifications.

        specifications: mapping of rest_name -> specification.  The static
        files are written first, then one model file per specification is
        rendered on background tasks before the output is gofmt'ed.
        """
        self._write_info()
        self._write_session()

        task_manager = TaskManager()
        for rest_name, specification in specifications.items():
            task_manager.start_task(method=self._write_model, specification=specification, specification_set=specifications)
        task_manager.wait_until_exit()

        self._format()

    def _write_info(self):
        """Render sdkinfo.go from its template."""
        self.write(destination=self.output_directory, filename="sdkinfo.go", template_name="sdkinfo.go.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   product_name=self._product_name,
                   name=self._transformation_name,
                   header=self.header_content)

    def _write_session(self):
        """Render session.go from its template."""
        self.write(destination=self.output_directory, filename="session.go", template_name="session.go.tpl",
                   version=self.api_version,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   name=self._transformation_name,
                   header=self.header_content)

    def _write_model(self, specification, specification_set):
        """Render one <entity>.go model file.

        Returns (filename, entity_name) for the rendered specification.
        """
        filename = "%s.go" % (specification.entity_name.lower())

        # Collect any configured attribute defaults for this entity.
        defaults = {}
        section = specification.entity_name
        if self.attrs_defaults.has_section(section):
            for attribute in self.attrs_defaults.options(section):
                defaults[attribute] = self.attrs_defaults.get(section, attribute)

        self.write(destination=self.output_directory, filename=filename, template_name="model.go.tpl",
                   specification=specification,
                   specification_set=specification_set,
                   name=self._transformation_name,
                   header=self.header_content,
                   attribute_defaults=defaults)

        return (filename, specification.entity_name)

    def _format(self):
        """Run gofmt over the output directory; failures are silenced."""
        os.system("gofmt -w '%s' >/dev/null 2>&1" % self.output_directory)
class TwitterBot(object):
    """IRC bot that polls Twitter and relays new tweets to IRC channels.

    Behavior is driven entirely by an INI config file (sections: general,
    logging, oauth, twitter, urls, irc); see the defaults below.
    """

    def __init__(self, config_filename=None):
        self.config_filename = config_filename
        self.config = RawConfigParser(
            defaults={
                # DEFAULTS
                # general
                'robust': 'False',
                # logging
                'loglevel': 'DEBUG',
                'logformat': '[%(asctime)s] %(levelname)s %(name)s: %(message)s',
                'dateformat': '%Y-%m-%dT%H:%M:%S',
                # irc
                'use_ssl': 'False',
                'nick': 'twitterbot',
                'password': '',
                'msg_prefix': '',
                'notification_command': 'NOTICE',
                # twitter
                'poll_interval': '60.0',
                'skip_old_tweets_on_start': 'True',
                'query': '',
                # urls
                'surround_urls_with_space': 'True',
                'use_expanded_urls': 'False',
                'follow_redirects': 'False',
                'follow_only_domains': '',
                'detect_urls_by_regex': 'False',
            })
        self.config.read(self.config_filename)
        # setup general
        self.be_robust = (self.config.has_section('general')
                          and self.config.getboolean('general', 'robust'))
        # setup logging
        self.log = logging.getLogger('twitterbot')
        self.log.setLevel(
            getattr(logging, self.config.get('logging', 'loglevel').upper()))
        log_stream = logging.StreamHandler()
        log_stream.setFormatter(
            logging.Formatter(self.config.get('logging', 'logformat'),
                              self.config.get('logging', 'dateformat')))
        self.log.addHandler(log_stream)
        # setup twitter
        self.twitter = twitter.Api(
            consumer_key=self.config.get('oauth', 'consumer_key'),
            consumer_secret=self.config.get('oauth', 'consumer_secret'),
            access_token_key=self.config.get('oauth', 'access_token_key'),
            access_token_secret=self.config.get('oauth', 'access_token_secret'))
        self.query = self.config.get('twitter', 'query')
        self.seen_tweets = set()  # set of ids
        # get url expansion preferences
        self.surround_urls_with_space = \
            self.config.getboolean('urls', 'surround_urls_with_space')
        self.use_expanded_urls = \
            self.config.getboolean('urls', 'use_expanded_urls')
        self.follow_redirects = \
            self.config.getboolean('urls', 'follow_redirects')
        self.follow_only_domains = [
            domain.strip() for domain in self.config.get(
                'urls', 'follow_only_domains').strip().split(',')
            if domain.strip()
        ]
        self.detect_urls_by_regex = \
            self.config.getboolean('urls', 'detect_urls_by_regex')
        # setup irc
        # Reactor replaced IRC in newer python-irc releases; support both.
        self.irc = irc.client.Reactor() if hasattr(
            irc.client, 'Reactor') else irc.client.IRC()
        self.irc.add_global_handler('privmsg', self.handle_privmsg)
        self.irc_server = self.irc.server()
        self.irc_server_name = self.config.get('irc', 'server')
        self.irc_use_ssl = self.config.getboolean('irc', 'use_ssl')
        self.irc_server_port = (self.config.getint(
            'irc', 'port') if self.config.has_option('irc', 'port') else None)
        self.nick = self.config.get('irc', 'nick')
        self.irc_password = self.config.get('irc', 'password')
        self.channels = [
            channel.strip() for channel in self.config.get(
                'irc', 'channels').strip().split(',') if channel.strip()
        ]
        # setup scheduled tasks
        self.scheduler = Scheduler([
            ScheduledTask(self.process_irc_events, delta=0.25),
            ScheduledTask(self.process_twitter,
                          delta=self.config.getfloat('twitter',
                                                     'poll_interval'))
        ])

    def handle_privmsg(self, connection, event):
        # Incoming IRC private messages are currently ignored.
        pass
        #TODO:
        """
        help
        follow foo
        add-to-query foo
        alter query
        search foo # add notification in the channels
        """

    def process_irc_events(self):
        """Pump the IRC client once; scheduled every 0.25s."""
        self.irc.process_once()

    def _get_tweets(self):
        """Returns: new twitter status objects.

        Fetches the home timeline (plus the configured search query, if
        any) oldest-first, and filters out ids already seen.  The seen-id
        set is reset once it grows past 100000 entries to bound memory.
        """
        tweets = list(reversed(self.twitter.GetHomeTimeline()))
        if self.query:
            tweets += list(reversed(self.twitter.GetSearch(self.query)))
        seen_tweets = set(tweet.id for tweet in tweets)
        tweets = [
            tweet for tweet in tweets if tweet.id not in self.seen_tweets
        ]
        if len(self.seen_tweets) < 100000:
            self.seen_tweets.update(seen_tweets)
        else:
            self.seen_tweets = seen_tweets
        return tweets

    def notify_channels(self, message):
        """Send *message* to all configured channels, split to fit IRC lines."""
        command = '%s %s :' % (self.config.get(
            'irc', 'notification_command'), ','.join(self.channels))
        for chunk in split_utf8_at_space(message.encode('utf8'),
                                         IRC_LINE_LIMIT - len(command)):
            self.irc_server.send_raw(command + chunk.decode('utf8'))

    def _follow_url(self, url):
        """Follows urls and returns the result.

        Issues a HEAD request following redirects; on any error the
        original url is returned unchanged.  When follow_only_domains is
        set, urls on other hosts are passed through untouched.
        """
        self.log.debug('[following] %r', url)
        try:
            if self.follow_only_domains:
                if urllib.parse.urlparse(url).hostname not in \
                        self.follow_only_domains:
                    return url
            r = requests.head(url, allow_redirects=True)
            r.raise_for_status()
            result = r.url
            self.log.debug('[following] %r -> %r', url, result)
            return result
        except requests.ConnectionError as e:
            self.log.debug('[following] %r - connection error: %s', url, e)
        except Exception as e:
            self.log.exception('[following] %r error: %s', url, e, exc_info=e)
        return url

    def _handle_url_expansion(self, message, tweet_urls, max_link_length=None):
        """
        :param message: the tweet as text
        :param tweet_url: {short_url: expanded_url}
        :param max_link_length: if expanded urls shorten if final url is longer
        """
        # NOTE(review): this guard tests a non-empty tuple, which is always
        # truthy, so the early return is never taken -- the intent was
        # probably `any(...)` or `or` between the three flags; confirm.
        if not (self.surround_urls_with_space, self.use_expanded_urls,
                self.follow_redirects):
            return message
        tweet_urls = (tweet_urls or {}).copy()
        self.log.debug("[expanding urls] (using urls %r) %r", tweet_urls,
                       message)
        if self.detect_urls_by_regex:
            # Blank out the known urls (longest first so prefixes don't
            # clobber longer matches), then regex-scan for any leftovers.
            non_urled = reduce(lambda s, url: s.replace(url, ' '),
                               sorted(tweet_urls, key=len, reverse=True),
                               message)
            detected_urls = re.findall("(https?://[^ )]+)", non_urled)
            tweet_urls.update((url, url) for url in detected_urls)
        if not self.use_expanded_urls:
            # Keep the short form: map every url onto itself.
            tweet_urls = dict((url, url) for url in tweet_urls)
        if self.follow_redirects:
            tweet_urls = dict((url, self._follow_url(value))
                              for (url, value) in tweet_urls.items())
        if tweet_urls:
            url_replace = lambda url: (
                (" ?%s ?" if self.surround_urls_with_space else "%s")
                % re.escape(url))

            # NOTE(review): get_final_url (the only consumer of
            # max_link_length) is defined but never used -- the re.sub
            # below uses a lambda that ignores max_link_length; confirm
            # whether the length cap was meant to apply.
            def get_final_url(matched_text):
                res = tweet_urls.get(matched_text.strip(), matched_text)
                if len(res) > max_link_length:
                    return matched_text
                else:
                    return res

            message = re.sub(
                '|'.join(
                    map(url_replace,
                        sorted(tweet_urls, key=len, reverse=True))),
                lambda m: "{sep}{url}{sep}".format(
                    sep=' ' if self.surround_urls_with_space else '',
                    url=tweet_urls.get(m.group(0).strip(), m.group(0))),
                message)
        return message.strip()

    def _urls_to_dict(self, url_list):
        """Extracts urls from tweet status url list to {url: expanded}."""
        url_list = url_list or []
        return dict((u.url, u.expanded_url) for u in url_list)

    def _ircfy_tweet(self, tweet):
        """Takes a twitter status and outputs irc message."""
        message = tweet.text
        urls = tweet.urls
        if tweet.retweeted_status:
            #HACK: because iPhone sucks and does not correctly handle RT
            message = "RT @{0}: {1}".format(
                tweet.retweeted_status.user.screen_name,
                tweet.retweeted_status.text)
            urls = tweet.urls
        try:
            message = HTMLParser().unescape(message)
        except:
            self.log.exception("Unable to escape message %r", message)
        message = "{surround}{screen_name}{surround}: {message}".format(
            surround=IRC_BOLD,
            screen_name=tweet.user.screen_name,
            message=message)
        # IRC messages are single-line; strip CRs and fold newlines.
        message = message.replace('\r', '').replace('\n', ' ')
        urls = self._urls_to_dict(urls)
        message = self._handle_url_expansion(message, urls, 440)
        return message

    def process_twitter(self):
        """Poll for new tweets and relay each one to the channels."""
        tweets = self._get_tweets()
        self.log.debug("Fetched %d new tweets", len(tweets))
        for tweet in tweets:
            message = self._ircfy_tweet(tweet)
            self.log.debug("[notifying] %r", message)
            self.notify_channels(message)

    def die(self):
        """Quit IRC; called on KeyboardInterrupt."""
        self.log.debug("Quiting...")
        self.irc_server.send_raw('quit')

    def run(self):
        """Main loop: connect, optionally drain old tweets, then schedule."""
        self._connect_irc()
        if self.config.getboolean('twitter', 'skip_old_tweets_on_start'):
            # Prime seen_tweets so the backlog isn't replayed into IRC.
            self._get_tweets()
        while True:
            try:
                self.scheduler.run_forever()
            except KeyboardInterrupt:
                self.die()
                break
            except twitter.TwitterError as e:
                # these are mostly harmless - twitter being down or rate
                # limit exceeded
                self.log.info("Twitter error: %s", e)
            except http.client.BadStatusLine as e:
                # for whatever reason twitter sucks
                self.log.info("BadStatusLine (probably twitter error): %s", e)
            except irc.client.ServerNotConnectedError:
                self.log.debug("Not connected to irc server. "
                               "Trying to reconnect...")
                self._connect_irc()
            except UnicodeDecodeError:
                # NOTE(review): `e` is not bound here (no `as e`), so this
                # log call raises NameError and falls through to the outer
                # handler -- should be `except UnicodeDecodeError as e`.
                self.log.exception("Unicode exception: %s", e, exc_info=e)
                pass  # not good
            except Exception as e:
                self.log.exception("Unhandled exception: %s", e)
                if not self.be_robust:
                    raise

    def _connect_irc(self):
        """Connect to the configured server and join all channels."""
        self.log.debug('Connecting to %s...', self.irc_server_name)
        self.irc_server.connect(
            self.irc_server_name,
            self.irc_server_port or (self.irc_use_ssl and 6697 or 6667),
            self.nick,
            self.irc_password,
            connect_factory=(
                self.irc_use_ssl and
                irc.connection.Factory(wrapper=ssl.wrap_socket)
                # FIXME: there is no ssl verification
                or irc.connection.Factory()))
        self.log.debug('Connected to %s.', self.irc_server_name)
        for channel in self.channels:
            self.irc_server.join(channel)
        self.log.debug('Joined channels %s .', ','.join(self.channels))
class ParamStore(object):
    """Thread-safe key/value parameter store persisted to an INI file.

    Values are kept in a RawConfigParser and written back to disk only
    when flush() is called and something has changed.
    """

    def __init__(self, root_dir, file_name):
        self._lock = threading.Lock()
        with self._lock:
            if not os.path.isdir(root_dir):
                raise RuntimeError('Directory "' + root_dir + '" does not exist.')
            self._path = os.path.join(root_dir, file_name)
            self._dirty = False
            # Load any previously saved parameters (missing file is fine).
            self._config = RawConfigParser()
            self._config.read(self._path)

    def flush(self):
        """Write pending changes to disk; no-op when nothing changed."""
        if not self._dirty:
            return
        with self._lock:
            self._dirty = False
            with open(self._path, 'w') as handle:
                self._config.write(handle)

    def get(self, section, option, default=None):
        """Get a parameter value and return a string.

        If default is specified and section or option are not defined in
        the file, they are created and set to default, which is then the
        return value.
        """
        with self._lock:
            if self._config.has_option(section, option):
                return self._config.get(section, option)
            if default is not None:
                self._set(section, option, default)
            return default

    def get_datetime(self, section, option, default=None):
        """Like get(), but parse the stored value as a WSDateTime."""
        raw = self.get(section, option, default)
        return WSDateTime.from_csv(raw) if raw else raw

    def set(self, section, option, value):
        """Set option in section to string value."""
        with self._lock:
            self._set(section, option, value)

    def _set(self, section, option, value):
        # Caller must hold self._lock.  Skips the write (and the dirty
        # flag) when the stored value is already equal.
        if not self._config.has_section(section):
            self._config.add_section(section)
        elif (self._config.has_option(section, option)
                and self._config.get(section, option) == value):
            return
        self._config.set(section, option, value)
        self._dirty = True

    def unset(self, section, option):
        """Remove option from section, dropping the section once empty."""
        with self._lock:
            if not self._config.has_section(section):
                return
            if self._config.has_option(section, option):
                self._config.remove_option(section, option)
                self._dirty = True
            if not self._config.options(section):
                self._config.remove_section(section)
                self._dirty = True
def get_pages_list(comic_info: RawConfigParser, section_name="Pages"):
    """Return the page definitions listed in *section_name*.

    :param comic_info: parsed comic configuration
    :param section_name: section to read page options from (default "Pages")
    :return: list of ``{"template_name": ..., "title": ...}`` dicts, one per
        option in the section; empty list when the section is missing
    """
    # Bug fix: the section-existence check previously hard-coded "Pages",
    # so a caller passing a custom section_name would read from a section
    # that was never verified (or miss an existing one).  Check the
    # requested section instead.
    if comic_info.has_section(section_name):
        return [{"template_name": option,
                 "title": web_path(comic_info.get(section_name, option))}
                for option in comic_info.options(section_name)]
    return []
class ConfigStore:
    """INI-backed store of per-environment platform settings.

    Sections are keyed by environment (plus per-integration sections); a
    special system section tracks the currently selected environment.
    Changes are in-memory until commit() is called.
    """

    def __init__(self):
        # Load config.ini from the user config dir, creating the dir on
        # first run, then make sure every known environment has a section.
        self._config = RawConfigParser()
        self._path = os.path.join(__configdir__, 'config.ini')
        if os.path.exists(self._path):
            self._config.read(self._path)
        elif not os.path.exists(__configdir__):
            os.mkdir(__configdir__)
        self._load()
        for env in PlatformEnvironment.values():
            if not self.has_environment(env):
                self.add_environment(env)

    def _load(self):
        # First run: create the system section and default to LOCAL.
        if not self._config.has_section(SYSTEM_SECTION):
            self._config[SYSTEM_SECTION] = {}
            self.set_environment(PlatformEnvironment.LOCAL)

    def _get_integration_config_slot(self, integration_name):
        """Section name for an integration's config in the current env."""
        environment = self.get_environment()
        return f'{environment.value}/integrations/{integration_name}/config'

    def _get_integration_credentials_slot(self, integration_name):
        """Section name for an integration's credentials in the current env."""
        environment = self.get_environment()
        return (f'{environment.value}/integrations/'
                f'{integration_name}/credentials')

    def _parse_value(self, value):
        # Options are stored as strings; surface integers as ints when the
        # text converts cleanly, otherwise return the raw string.
        try:
            return int(value)
        except Exception:
            return value

    def _setup_environment(self, environment, values):
        """Create *environment* if needed and write the given variables."""
        if not self.has_environment(environment):
            self.add_environment(environment)
        for variable, value in values.items():
            self.put_value(environment, variable, value)

    def _setup_local(self):
        """Default service URLs for a local development stack."""
        values = {
            PlatformVariable.FRONTEND: 'http://localhost:8000',
            PlatformVariable.CMS: 'http://localhost:8001',
            PlatformVariable.INTEGRATIONS_MANAGER: 'http://localhost:8002',
            PlatformVariable.OPERATIONS: 'http://localhost:8003',
        }
        self._setup_environment(PlatformEnvironment.LOCAL, values)

    def _setup_development(self):
        """Service URLs for the shared development environment."""
        values = {
            PlatformVariable.FRONTEND:
                ('https://cms-frontend-development.bots-platform.com'),
            PlatformVariable.CMS:
                ('https://cms-backend-development.bots-platform.com'),
            PlatformVariable.INTEGRATIONS_MANAGER:
                ('https://integrations-manager-development.bots-platform.com'),
            PlatformVariable.OPERATIONS:
                ('https://operations-controller-development.bots-platform.com')
        }
        self._setup_environment(PlatformEnvironment.DEVELOPMENT, values)

    def _setup_staging(self):
        """Service URLs for the staging environment."""
        values = {
            PlatformVariable.FRONTEND:
                ('https://cms-frontend-staging.bots-platform.com'),
            PlatformVariable.CMS:
                ('https://cms-backend-staging.bots-platform.com'),
            PlatformVariable.INTEGRATIONS_MANAGER:
                ('https://integrations-manager-staging.bots-platform.com'),
            PlatformVariable.OPERATIONS:
                ('https://operations-controller-staging-lb.bots-platform.com')
        }
        self._setup_environment(PlatformEnvironment.STAGING, values)

    def _setup_performance(self):
        """Service URLs for the performance-testing environment."""
        values = {
            PlatformVariable.FRONTEND: (
                # CMS frontend will be the same AFAIK
                'https://cms-frontend-development.bots-platform.com'),
            PlatformVariable.CMS: (
                # CMS backend is coming soon
                'https://cms-backend-development.bots-platform.com'),
            PlatformVariable.INTEGRATIONS_MANAGER:
                ('https://integrations-manager-performance.bots-platform.com'),
            PlatformVariable.OPERATIONS:
                ('https://operations-controller-performance.bots-platform.com')
        }
        self._setup_environment(PlatformEnvironment.PERFORMANCE, values)

    def _setup_production(self):
        """Service URLs for production."""
        values = {
            PlatformVariable.FRONTEND: 'https://bots.wizeline.com',
            PlatformVariable.CMS:
                ('https://cms-backend-production.bots-platform.com'),
            PlatformVariable.INTEGRATIONS_MANAGER:
                ('https://integrations-manager-production-lb.bots-platform.com'),
            PlatformVariable.OPERATIONS:
                ('https://bots-api.wizeline.com')
        }
        self._setup_environment(PlatformEnvironment.PRODUCTION, values)

    def add_environment(self, environment):
        """Create an empty section for *environment*."""
        self._config[environment.value] = {}

    def commit(self):
        """Persist the in-memory configuration to config.ini."""
        with open(self._path, 'w') as config_file:
            self._config.write(config_file)

    def del_value(self, environment, variable):
        """Delete *variable* from *environment*.

        Raises UndefinedConfigSection when the environment section itself
        is missing, UndefinedConfigValue when only the variable is.
        """
        try:
            del self._config[environment.value][variable.value]
        except KeyError as ex:
            # The KeyError's first arg tells us which lookup failed.
            missing_key = ex.args[0]
            if missing_key == environment.value:
                raise UndefinedConfigSection(missing_key)
            else:
                raise UndefinedConfigValue(environment.value, missing_key)

    def get_environment(self):
        """Return the currently selected PlatformEnvironment."""
        raw_environment = self._config[SYSTEM_SECTION]['environment']
        return PlatformEnvironment(raw_environment)

    def get_integration_config(self, integration_name):
        """Return the integration's config dict for the current env ({} if unset)."""
        integration_slot = self._get_integration_config_slot(integration_name)
        if not self.has_section(integration_slot):
            return {}
        return self.get_values_from_section(integration_slot)

    def get_integration_credentials(self, integration_name):
        """Return the integration's credentials dict for the current env ({} if unset)."""
        integration_slot = self._get_integration_credentials_slot(
            integration_name)
        if not self.has_section(integration_slot):
            return {}
        return self.get_values_from_section(integration_slot)

    def get_value(self, environment, variable):
        """Return *variable*'s value in *environment*; raise typed errors if missing."""
        if not self.has_environment(environment):
            raise UndefinedConfigSection(environment.value)
        values = self._config[environment.value]
        if variable.value not in values:
            raise UndefinedConfigValue(environment, variable)
        return self._config[environment.value][variable.value]

    def get_values_from_section(self, section):
        """Return a section as a dict with ints parsed from their strings."""
        return {
            option: self._parse_value(value)
            for option, value in self._config[section].items()
        }

    def has_environment(self, environment):
        """Whether a section exists for *environment*."""
        return self.has_section(environment.value)

    def has_section(self, section):
        return self._config.has_section(section)

    def put_value(self, environment, variable, value):
        """Set *variable* in *environment*; raise if the section is missing."""
        try:
            self._config[environment.value].update({variable.value: value})
        except KeyError:
            # NOTE(review): unlike del_value this raises with the enum
            # itself rather than environment.value -- confirm callers.
            raise UndefinedConfigSection(environment)

    def set_environment(self, environment):
        """Record *environment* as the currently selected one."""
        self._config[SYSTEM_SECTION]['environment'] = environment.value

    def set_integration_config(self, integration_name, integration_config):
        """Store an integration's config for the current environment."""
        integration_slot = self._get_integration_config_slot(integration_name)
        if not self._config.has_section(integration_slot):
            self._config.add_section(integration_slot)
        self.set_values_at_section(integration_slot, **integration_config)

    def set_integration_credentials(self, integration_name,
                                    integration_credentials):
        """Store an integration's credentials for the current environment."""
        integration_slot = self._get_integration_credentials_slot(
            integration_name)
        if not self._config.has_section(integration_slot):
            self._config.add_section(integration_slot)
        self.set_values_at_section(integration_slot, **integration_credentials)

    def set_values_at_section(self, section, **kwvalues):
        # Everything is stored as its str() representation.
        for option, value in kwvalues.items():
            self._config[section][option] = str(value)

    def setup(self):
        """Populate all environments with their default service URLs."""
        self._setup_local()
        self._setup_development()
        self._setup_staging()
        self._setup_performance()
        self._setup_production()
class Settings:
    """Application settings persisted as XRCEA.cfg in a per-user directory.

    Values are stored as strings (via repr) and converted back on access
    using the type of the supplied default.
    """

    def __init__(self):
        self.__config = RawConfigParser()
        self.items = self.__config.items
        self.__app_home = self.__locate_app_home()
        # A stale plain file in place of the settings directory is removed.
        if isfile(self.__app_home):
            os.remove(self.__app_home)
        if not isdir(self.__app_home):
            os.mkdir(self.__app_home, 0o755)
        self.__config.read(self.get_home("XRCEA.cfg"))
        self.declare_section("PALETTE")
        self.__default_colors = {}

    def __locate_app_home(self):
        # Pick a platform-appropriate settings directory.
        if os.name == 'posix':
            conf_dir = expanduser("~/.config")
            if isdir(conf_dir):
                return join(conf_dir, "XRCEA")
            return expanduser("~/.XRCEA")
        if os.name == 'nt':
            if isdir(expanduser("~/Application Data")):
                return expanduser("~/Application Data/XRCEA")
            return expanduser("~/XRCEA")
        return normpath(expanduser("~/XRCEA"))

    def declare_section(self, section):
        """Ensure *section* exists in the config."""
        if not self.__config.has_section(section):
            self.__config.add_section(section)

    def get(self, name, default=None, section='DEFAULT'):
        """Return *name* converted to the type of *default* (str if None)."""
        if not self.__config.has_option(section, name):
            return default
        deft = str if default is None else type(default)
        typed_getters = {float: self.__config.getfloat,
                         int: self.__config.getint,
                         bool: self.__config.getboolean}
        try:
            getter = typed_getters.get(deft)
            if getter is not None:
                return getter(section, name)
            return deft(self.__config.get(section, name))
        except ValueError:
            print("Warning: cannot convert {} into {}".format(
                repr(self.__config.get(section, name)), deft.__name__))
            return default

    def get_color(self, name):
        """Return the palette color, falling back to registered defaults."""
        if name is None:
            return
        if self.__config.has_option("PALETTE", name):
            return self.__config.get("PALETTE", name)
        return self.__default_colors.get(name)

    def set(self, name, val, section='DEFAULT'):
        """Store *val* (repr'ed unless already a string)."""
        self.__config.set(section, name,
                          val if isinstance(val, str) else repr(val))

    def set_color(self, name, val):
        self.__config.set("PALETTE", name, val)

    def add_default_colors(self, colors):
        """Register fallback colors used when PALETTE has no entry."""
        self.__default_colors.update(colors)

    def get_home(self, name=''):
        """Path of *name* inside the settings dir (or the dir itself)."""
        return join(self.__app_home, name) if name else self.__app_home

    def save(self):
        """Write the settings back to XRCEA.cfg."""
        with open(self.get_home("XRCEA.cfg"), "w") as fobj:
            self.__config.write(fobj)
class Config(object):
    """A wrapper around RawConfigParser.

    Provides a ``defaults`` attribute of the same type which
    can be used to set default values.

    Every typed getter accepts an optional ``default``: when the lookup
    fails it first consults ``self.defaults`` and only then falls back to
    (or raises without) the supplied default.
    """

    def __init__(self, version=None, _defaults=True):
        """Use read() to read in an existing config file.

        version should be an int starting with 0 that gets incremented if
        you want to register a new upgrade function. If None, upgrade is
        disabled.
        """
        self._config = ConfigParser(dict_type=_sorted_dict)
        self.defaults = None
        if _defaults:
            # The defaults layer is itself a Config, without its own
            # defaults (to stop the recursion).
            self.defaults = Config(_defaults=False)
        self._version = version
        self._loaded_version = None
        self._upgrade_funcs = []

    def _do_upgrade(self, func):
        # Run one upgrade function if the loaded file is older/newer than
        # the current version.
        assert self._loaded_version is not None
        assert self._version is not None

        old_version = self._loaded_version
        new_version = self._version
        if old_version != new_version:
            print_d("Config upgrade: %d->%d (%r)" % (
                old_version, new_version, func))
            func(self, old_version, new_version)

    def get_version(self):
        """Get the version of the loaded config file (for testing only)

        Raises Error if no file was loaded or versioning is disabled.
        """
        if self._version is None:
            raise Error("Versioning disabled")

        if self._loaded_version is None:
            raise Error("No file loaded")

        return self._loaded_version

    def register_upgrade_function(self, function):
        """Register an upgrade function that gets called at each read()
        if the current config version and the loaded version don't match.

        Can also be registered after read was called.

        function(config, old_version: int, new_version: int) -> None
        """
        if self._version is None:
            raise Error("Versioning disabled")

        self._upgrade_funcs.append(function)
        # after read(), so upgrade now
        if self._loaded_version is not None:
            self._do_upgrade(function)
        return function

    def reset(self, section, option):
        """Reset the value to the default state"""
        assert self.defaults is not None
        try:
            self._config.remove_option(section, option)
        except NoSectionError:
            pass

    def options(self, section):
        """Returns a list of options available in the specified section."""
        try:
            options = self._config.options(section)
        except NoSectionError:
            if self.defaults:
                return self.defaults.options(section)
            raise
        else:
            # Merge in options only present in the defaults layer.
            if self.defaults:
                try:
                    options.extend(self.defaults.options(section))
                    options = list_unique(options)
                except NoSectionError:
                    pass
            return options

    def get(self, section, option, default=_DEFAULT):
        """get(section, option[, default]) -> str

        If default is not given or set, raises Error in case of an error
        """
        try:
            return self._config.get(section, option)
        except Error:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.get(section, option)
                    except Error:
                        pass
                raise
            return default

    def gettext(self, *args, **kwargs):
        value = self.get(*args, **kwargs)
        # make sure there are no surrogates
        value.encode("utf-8")
        return value

    def getbytes(self, section, option, default=_DEFAULT):
        try:
            value = self._config.get(section, option)
            # Stored with surrogateescape, so this round-trips raw bytes.
            value = value.encode("utf-8", "surrogateescape")
            return value
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getbytes(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getboolean(self, section, option, default=_DEFAULT):
        """getboolean(section, option[, default]) -> bool

        If default is not given or set, raises Error in case of an error
        """
        try:
            return self._config.getboolean(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getboolean(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getint(self, section, option, default=_DEFAULT):
        """getint(section, option[, default]) -> int

        If default is not give or set, raises Error in case of an error
        """
        try:
            # Parse through float so values like "1.5" still yield an int.
            return int(self._config.getfloat(section, option))
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getint(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getfloat(self, section, option, default=_DEFAULT):
        """getfloat(section, option[, default]) -> float

        If default is not give or set, raises Error in case of an error
        """
        try:
            return self._config.getfloat(section, option)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getfloat(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def getstringlist(self, section, option, default=_DEFAULT):
        """getstringlist(section, option[, default]) -> list

        If default is not given or set, raises Error in case of an error.

        Gets a list of strings, using CSV to parse and delimit.
        """
        try:
            value = self._config.get(section, option)

            parser = csv.reader(
                [value], lineterminator='\n', quoting=csv.QUOTE_MINIMAL)
            try:
                vals = next(parser)
            except (csv.Error, ValueError) as e:
                raise Error(e)
            return vals
        except Error as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getstringlist(section, option)
                    except Error:
                        pass
                raise Error(e)
            return default

    def setstringlist(self, section, option, values):
        """Saves a list of unicode strings using the csv module"""
        sw = StringIO()
        values = [str(v) for v in values]
        writer = csv.writer(sw, lineterminator='\n',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow(values)
        self.set(section, option, sw.getvalue())

    def setlist(self, section, option, values, sep=","):
        """Saves a list of str using ',' as a separator and \\ for escaping"""
        values = [str(v) for v in values]
        joined = join_escape(values, sep)
        self.set(section, option, joined)

    def getlist(self, section, option, default=_DEFAULT, sep=","):
        """Returns a str list saved with setlist()"""
        try:
            value = self._config.get(section, option)
            return split_escape(value, sep)
        except (Error, ValueError) as e:
            if default is _DEFAULT:
                if self.defaults is not None:
                    try:
                        return self.defaults.getlist(section, option, sep=sep)
                    except Error:
                        pass
                raise Error(e)
            return default

    def set(self, section, option, value):
        """Saves the string representation for the passed value

        Don't pass unicode, encode first.
        """
        if isinstance(value, bytes):
            raise TypeError("use setbytes")

        # RawConfigParser only allows string values but doesn't
        # scream if they are not (and it only fails before the
        # first config save..)
        if not isinstance(value, str):
            value = str(value)

        try:
            self._config.set(section, option, value)
        except NoSectionError:
            # Auto-create the section, but only for sections the defaults
            # layer knows about.
            if self.defaults and self.defaults.has_section(section):
                self._config.add_section(section)
                self._config.set(section, option, value)
            else:
                raise

    def settext(self, section, option, value):
        value = str(value)
        # make sure there are no surrogates
        value.encode("utf-8")
        self.set(section, option, value)

    def setbytes(self, section, option, value):
        assert isinstance(value, bytes)
        # surrogateescape keeps arbitrary bytes representable as str.
        value = value.decode("utf-8", "surrogateescape")
        self.set(section, option, value)

    def write(self, filename):
        """Write config to filename.

        Can raise EnvironmentError
        """
        assert isinstance(filename, fsnative)

        mkdir(os.path.dirname(filename))

        # temporary set the new version for saving
        if self._version is not None:
            self.add_section("__config__")
            self.set("__config__", "version", self._version)
        try:
            with atomic_save(filename, "wb") as fileobj:
                temp = StringIO()
                self._config.write(temp)
                data = temp.getvalue().encode("utf-8", "surrogateescape")
                fileobj.write(data)
        finally:
            # Restore the loaded version after a temporary bump above.
            if self._loaded_version is not None:
                self.set("__config__", "version", self._loaded_version)

    def clear(self):
        """Remove all sections."""
        for section in self._config.sections():
            self._config.remove_section(section)

    def is_empty(self):
        """Whether the config has any sections"""
        return not self._config.sections()

    def read(self, filename):
        """Reads the config from `filename` if the file exists,
        otherwise does nothing

        Can raise EnvironmentError, Error.
        """
        try:
            with open(filename, "rb") as fileobj:
                fileobj = StringIO(fileobj.read().decode(
                    "utf-8", "surrogateescape"))
                self._config.readfp(fileobj, filename)
        except (IOError, OSError):
            return

        # don't upgrade if we just created a new config
        if self._version is not None:
            self._loaded_version = self.getint("__config__", "version", -1)
            for func in self._upgrade_funcs:
                self._do_upgrade(func)

    def has_option(self, section, option):
        """If the given section exists, and contains the given option"""
        return self._config.has_option(
            section, option) or (self.defaults
                                 and self.defaults.has_option(section, option))

    def has_section(self, section):
        """If the given section exists"""
        return self._config.has_section(section) or (
            self.defaults and self.defaults.has_section(section))

    def remove_option(self, section, option):
        """Remove the specified option from the specified section

        Can raise Error.
        """
        return self._config.remove_option(section, option)

    def add_section(self, section):
        """Add a section named section to the instance if it not
        already exists."""
        if not self._config.has_section(section):
            self._config.add_section(section)
class Config(object):
    """Hold configuration state and utility functions related to config state.

    This is kind of a catch all for functionality related to the current
    configuration.
    """

    def __init__(self, filename=None):
        self.c = RawConfigParser()

        if filename:
            if not os.path.exists(filename):
                raise ValueError('config file does not exist: %s' % filename)

            self.c.read(filename)

        # Each *_rewrites attribute is a list of (source, dest) pairs read
        # from its config section, or empty when the section is absent.
        if self.c.has_section('path_rewrites'):
            self._path_rewrites = self.c.items('path_rewrites')
        else:
            self._path_rewrites = []

        if self.c.has_section('pull_url_rewrites'):
            self._pull_url_rewrites = self.c.items('pull_url_rewrites')
        else:
            self._pull_url_rewrites = []

        if self.c.has_section('public_url_rewrites'):
            self._public_url_rewrites = self.c.items('public_url_rewrites')
        else:
            self._public_url_rewrites = []

        if self.c.has_section('replicationpathrewrites'):
            self._replication_path_rewrites = self.c.items('replicationpathrewrites')
        else:
            self._replication_path_rewrites = []

        if self.c.has_section('replicationrules'):
            # Keys look like "<behaviour>.<name>" and values like
            # "<ruletype>:<rule>", e.g. "include.foo = re:^users/.*".
            re_includes, re_excludes = [], []
            self.path_includes, self.path_excludes = {}, {}
            for key, value in self.c.items('replicationrules'):
                (behaviour, name), (ruletype, rule) = key.split('.'), value.split(':')

                if ruletype == 're':
                    # Decide which list is correct and append to it
                    restore = re_includes if behaviour == 'include' else re_excludes
                    restore.append((name, rule))
                elif ruletype == 'path':
                    # Exact-path rules map path -> rule name for O(1) lookup
                    exstore = self.path_includes if behaviour == 'include' else self.path_excludes
                    exstore[rule] = name
                else:
                    raise Exception('bad ruletype %s' % ruletype)

            # Create the in/out rules as an `or` of all the rules; each rule
            # becomes a named group so filter() can report which rule fired.
            includes_string = '|'.join(
                create_namedgroup(name, rule)
                for name, rule in re_includes
            )
            excludes_string = '|'.join(
                create_namedgroup(name, rule)
                for name, rule in re_excludes
            )

            self.include_regex = re.compile(includes_string) if includes_string else None
            self.exclude_regex = re.compile(excludes_string) if excludes_string else None

            self.has_filters = bool(self.path_includes or self.path_excludes or
                                    self.include_regex or self.exclude_regex)
        else:
            self.has_filters = False

    def get(self, section, option):
        """Return the option value coerced to the native str type."""
        return pycompat.sysstr(self.c.get(section, option))

    @property
    def hg_path(self):
        """Path to a hg executable."""
        if self.c.has_section('programs') and self.c.has_option('programs', 'hg'):
            return self.get('programs', 'hg')

        # fall back to whatever "hg" resolves to on PATH
        return 'hg'

    def is_backup(self):
        """Return `True` if the consumer is acting as a backup of core repo
        data.
        """
        return (
            self.c.has_section('consumer')
            and self.c.getboolean('consumer', 'backup', fallback=False)
        )

    def parse_wire_repo_path(self, path):
        """Parse a normalized repository path into a local path."""
        # first matching prefix wins
        for source, dest in self._path_rewrites:
            if path.startswith(source):
                return path.replace(source, dest)

        return path

    def get_replication_path_rewrite(self, path):
        """Parse a local path into a wire path"""
        for source, dest in self._replication_path_rewrites:
            if path.startswith(source):
                return dest + path[len(source):]

        # None signals "no rewrite rule applies"
        return None

    def get_pull_url_from_repo_path(self, path):
        """Obtain a URL to be used for pulling from a local repo path."""
        for source, dest in self._pull_url_rewrites:
            if path.startswith(source):
                return dest + path[len(source):]

        return None

    def get_public_url_from_wire_path(self, path):
        """Obtain a URL to be used for public advertisement from a wire
        protocol path."""
        for source, dest in self._public_url_rewrites:
            if path.startswith(source):
                return dest + path[len(source):]

        return None

    def filter(self, repo):
        """Returns a RepoFilterResult indicating if the repo should be
        filtered out of the set and which rule performed the include/exclude.

        If the repo was not touched by any rule, we default to disallowing
        the repo to be replicated. This rule is called "noinclude". If there
        were no filters defined at all, we pass the filter. This rule is
        called "nofilter".
        """
        if not self.has_filters:
            return RepoFilterResult(True, 'nofilter')

        # exact-path rules take precedence over regex rules
        if repo in self.path_includes:
            return RepoFilterResult(True, self.path_includes[repo])

        if repo in self.path_excludes:
            return RepoFilterResult(False, self.path_excludes[repo])

        includematch = self.include_regex.match(repo) if self.include_regex else None
        excludematch = self.exclude_regex.match(repo) if self.exclude_regex else None

        # Repo passes through filter if matching an include rule
        # and not matching an exclude rule
        if includematch and not excludematch:
            # NOTE(review): groupdict() contains every named group in the
            # combined pattern; this reports the first one in pattern order,
            # which is presumably the intended "rule that matched" — confirm.
            matchkeys = iter(includematch.groupdict().keys())
            return RepoFilterResult(True, next(matchkeys))

        # Return specific exclude rule if there was a match
        if excludematch:
            matchkeys = iter(excludematch.groupdict().keys())
            return RepoFilterResult(False, next(matchkeys))

        # Use "noinclude" if we didn't get a match for an include rule
        return RepoFilterResult(False, 'noinclude')

    def get_client_from_section(self, section, timeout=-1):
        """Obtain a KafkaClient from a config section.

        The config section must have a ``hosts`` and ``client_id`` option.
        An optional ``connect_timeout`` defines the connection timeout.

        ``timeout`` specifies how many seconds to retry attempting to connect
        to Kafka in case the initial connection failed. -1 indicates to not
        retry. This is useful when attempting to connect to a cluster that
        may still be coming online, for example.
        """
        hosts = self.get(section, 'hosts')
        client_id = self.get(section, 'client_id')
        connect_timeout = 60
        if self.c.has_option(section, 'connect_timeout'):
            connect_timeout = self.c.getint(section, 'connect_timeout')

        start = time.time()
        while True:
            try:
                return SimpleClient(hosts, client_id=client_id,
                                    timeout=connect_timeout)
            except KafkaUnavailableError:
                if timeout == -1:
                    raise

                if time.time() - start > timeout:
                    raise Exception('timeout reached trying to connect to '
                                    'Kafka')

                # brief backoff before retrying the connection
                time.sleep(0.1)
class MrxsFile(object):
    """In-place editor for a MIRAX (.mrxs) slide.

    Parses Slidedat.ini plus the index file and allows deleting individual
    non-hierarchical levels (zeroing image data and rewriting the index and
    slidedat metadata).

    NOTE(review): file handles are used in binary mode while strings are
    written to them (see _zero_record/_write) — this presumably targets
    Python 2, where str is bytes; confirm before running under Python 3.
    """

    def __init__(self, filename):
        # Split filename
        dirname, ext = os.path.splitext(filename)
        if ext != '.mrxs':
            raise UnrecognizedFile

        # Parse slidedat
        self._slidedatfile = os.path.join(dirname, 'Slidedat.ini')
        self._dat = RawConfigParser()
        # preserve key case: slidedat keys are case-sensitive
        self._dat.optionxform = str
        try:
            with open(self._slidedatfile, 'rb') as fh:
                # remember whether the file had a UTF-8 BOM so _write()
                # can reproduce it
                self._have_bom = (fh.read(len(UTF8_BOM)) == UTF8_BOM)
                if not self._have_bom:
                    fh.seek(0)
                self._dat.readfp(fh)
        except IOError:
            raise UnrecognizedFile

        # Get file paths
        self._indexfile = os.path.join(
            dirname, self._dat.get(MRXS_HIERARCHICAL, 'INDEXFILE'))
        self._datafiles = [
            os.path.join(dirname, self._dat.get('DATAFILE', 'FILE_%d' % i))
            for i in range(self._dat.getint('DATAFILE', 'FILE_COUNT'))
        ]

        # Build levels
        self._make_levels()

    def _make_levels(self):
        """(Re)build the level lookup table from the slidedat metadata."""
        self._levels = {}
        self._level_list = []
        layer_count = self._dat.getint(MRXS_HIERARCHICAL, 'NONHIER_COUNT')
        for layer_id in range(layer_count):
            level_count = self._dat.getint(MRXS_HIERARCHICAL,
                                           'NONHIER_%d_COUNT' % layer_id)
            for level_id in range(level_count):
                level = MrxsNonHierLevel(self._dat, layer_id, level_id,
                                         len(self._level_list))
                self._levels[(level.layer_name, level.name)] = level
                self._level_list.append(level)

    @classmethod
    def _read_int32(cls, f):
        """Read one little-endian signed 32-bit integer from f."""
        buf = f.read(4)
        if len(buf) != 4:
            raise IOError('Short read')
        return struct.unpack('<i', buf)[0]

    @classmethod
    def _assert_int32(cls, f, value):
        """Read an int32 and fail loudly if it is not the expected value."""
        v = cls._read_int32(f)
        if v != value:
            raise ValueError('%d != %d' % (v, value))

    def _get_data_location(self, record):
        """Resolve an index record to (datafile path, offset, size)."""
        with open(self._indexfile, 'rb') as fh:
            fh.seek(MRXS_NONHIER_ROOT_OFFSET)
            # seek to record
            table_base = self._read_int32(fh)
            fh.seek(table_base + record * 4)
            # seek to list head
            list_head = self._read_int32(fh)
            fh.seek(list_head)
            # seek to data page
            self._assert_int32(fh, 0)
            page = self._read_int32(fh)
            fh.seek(page)
            # check pagesize
            self._assert_int32(fh, 1)
            # read rest of prologue
            self._read_int32(fh)
            self._assert_int32(fh, 0)
            self._assert_int32(fh, 0)
            # read values
            position = self._read_int32(fh)
            size = self._read_int32(fh)
            fileno = self._read_int32(fh)
            return (self._datafiles[fileno], position, size)

    def _zero_record(self, record):
        """Erase a record's image data: truncate if it is the file tail,
        otherwise overwrite with zero bytes."""
        path, offset, length = self._get_data_location(record)
        with open(path, 'r+b') as fh:
            fh.seek(0, 2)
            do_truncate = (fh.tell() == offset + length)
            if DEBUG:
                if do_truncate:
                    print('Truncating', path, 'to', offset)
                else:
                    print('Zeroing', path, 'at', offset, 'for', length)
            fh.seek(offset)
            # sanity check: the record must start with a JPEG SOI marker
            buf = fh.read(len(JPEG_SOI))
            if buf != JPEG_SOI:
                raise IOError('Unexpected data in nonhier image')
            if do_truncate:
                fh.truncate(offset)
            else:
                fh.seek(offset)
                # NOTE(review): writes str to a binary handle — Python 2
                # semantics assumed; on Python 3 this would need b'\0'.
                fh.write('\0' * length)

    def _delete_index_record(self, record):
        """Remove a record from the nonhier pointer table by shifting the
        following entries down over it."""
        if DEBUG:
            print('Deleting record', record)
        with open(self._indexfile, 'r+b') as fh:
            entries_to_move = len(self._level_list) - record - 1
            if entries_to_move == 0:
                return
            # get base of table
            fh.seek(MRXS_NONHIER_ROOT_OFFSET)
            table_base = self._read_int32(fh)
            # read tail of table
            fh.seek(table_base + (record + 1) * 4)
            buf = fh.read(entries_to_move * 4)
            if len(buf) != entries_to_move * 4:
                raise IOError('Short read')
            # overwrite the target record
            fh.seek(table_base + record * 4)
            fh.write(buf)

    def _hier_keys_for_level(self, level):
        """Return all slidedat keys belonging to the given level."""
        ret = []
        for k, _ in self._dat.items(MRXS_HIERARCHICAL):
            if k == level.key_prefix or k.startswith(level.key_prefix + '_'):
                ret.append(k)
        return ret

    def _rename_section(self, old, new):
        """Copy all options from section old into new and drop old."""
        if self._dat.has_section(old):
            if DEBUG:
                print('[%s] -> [%s]' % (old, new))
            self._dat.add_section(new)
            for k, v in self._dat.items(old):
                self._dat.set(new, k, v)
            self._dat.remove_section(old)
        elif DEBUG:
            print('[%s] does not exist' % old)

    def _delete_section(self, section):
        if DEBUG:
            print('Deleting [%s]' % section)
        self._dat.remove_section(section)

    def _set_key(self, section, key, value):
        if DEBUG:
            prev = self._dat.get(section, key)
            print('[%s] %s: %s -> %s' % (section, key, prev, value))
        self._dat.set(section, key, value)

    def _rename_key(self, section, old, new):
        if DEBUG:
            print('[%s] %s -> %s' % (section, old, new))
        v = self._dat.get(section, old)
        self._dat.remove_option(section, old)
        self._dat.set(section, new, v)

    def _delete_key(self, section, key):
        if DEBUG:
            print('Deleting [%s] %s' % (section, key))
        self._dat.remove_option(section, key)

    def _write(self):
        """Write the slidedat back to disk with CRLF line endings and the
        original BOM (if any)."""
        buf = StringIO()
        self._dat.write(buf)
        with open(self._slidedatfile, 'wb') as fh:
            if self._have_bom:
                fh.write(UTF8_BOM)
            # MIRAX files use Windows line endings
            fh.write(buf.getvalue().replace('\n', '\r\n'))

    def delete_level(self, layer_name, level_name):
        """Delete one non-hierarchical level: zero its data, drop it from
        the index, and renumber the following levels of the same layer."""
        level = self._levels[(layer_name, level_name)]
        record = level.record

        # Zero image data
        self._zero_record(record)

        # Delete pointer from nonhier table in index
        self._delete_index_record(record)

        # Remove slidedat keys
        for k in self._hier_keys_for_level(level):
            self._delete_key(MRXS_HIERARCHICAL, k)

        # Remove slidedat section
        self._delete_section(level.section)

        # Rename section and keys for subsequent levels in the layer
        prev_level = level
        for cur_level in self._level_list[record + 1:]:
            if cur_level.layer_id != prev_level.layer_id:
                break
            for k in self._hier_keys_for_level(cur_level):
                new_k = k.replace(cur_level.key_prefix,
                                  prev_level.key_prefix, 1)
                self._rename_key(MRXS_HIERARCHICAL, k, new_k)
            self._set_key(MRXS_HIERARCHICAL, prev_level.section_key,
                          prev_level.section)
            self._rename_section(cur_level.section, prev_level.section)
            prev_level = cur_level

        # Update level count within layer
        count_k = 'NONHIER_%d_COUNT' % level.layer_id
        count_v = self._dat.getint(MRXS_HIERARCHICAL, count_k)
        self._set_key(MRXS_HIERARCHICAL, count_k, count_v - 1)

        # Write slidedat
        self._write()

        # Refresh metadata
        self._make_levels()
class APIVersionWriter(TemplateFileWriter):
    """ Provide useful method to write Java files. """

    def __init__(self, monolithe_config, api_info):
        """ Initializes a _JavaSDKAPIVersionFileWriter """
        super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.java")

        self.api_version = api_info["version"]
        self._api_version_string = SDKUtils.get_string_version(self.api_version)
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        self.monolithe_config = monolithe_config
        self._output = self.monolithe_config.get_option("output", "transformer")
        self._name = self.monolithe_config.get_option("name", "transformer")
        self._class_prefix = ""
        self._product_accronym = self.monolithe_config.get_option("product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")
        self._url = self.monolithe_config.get_option("url", "transformer")

        # Java package name is derived from the reversed host name plus the
        # SDK name and the API version, e.g. "com.example.sdk.v5_0".
        self._package_prefix = self._get_package_prefix(self._url)
        self._package_name = self._package_prefix + '.' + self._name + '.' + SDKUtils.get_string_version(self.api_version)
        self._package_subdir = self._package_name.replace('.', '/')

        self._base_output_directory = "%s/java" % (self._output)
        self.output_directory = "%s/src/main/java/%s" % (self._base_output_directory, self._package_subdir)
        self.override_folder = os.path.normpath("%s/__overrides" % self._base_output_directory)
        self.fetchers_path = "/fetchers/"

        # per-entity attribute default values (optionxform=str keeps keys
        # case-sensitive, matching the generated Java attribute names)
        self.attrs_defaults = RawConfigParser()
        path = "%s/java/__attributes_defaults/attrs_defaults.ini" % self._output
        self.attrs_defaults.optionxform = str
        self.attrs_defaults.read(path)

        # per-entity attribute Java type overrides
        self.attrs_types = RawConfigParser()
        path = "%s/java/__attributes_defaults/attrs_types.ini" % self._output
        self.attrs_types.optionxform = str
        self.attrs_types.read(path)

        self.library_version = self.monolithe_config.get_option("version", "transformer")

        # license/copyright header prepended to every generated file
        with open("%s/java/__code_header" % self._output, "r") as f:
            self.header_content = f.read()

    def perform(self, specifications):
        """ Generate the full SDK: info, session, build file, then one model
            and one fetcher per specification (in parallel tasks).
        """
        self._set_enum_list_local_type(specifications)
        self._write_info()
        self._write_session()
        self._write_build_file()

        task_manager = TaskManager()
        for rest_name, specification in specifications.items():
            task_manager.start_task(method=self._write_model, specification=specification, specification_set=specifications)
            task_manager.start_task(method=self._write_fetcher, specification=specification, specification_set=specifications)
        task_manager.wait_until_exit()

    def _write_session(self):
        """ Write SDK session file

            Args:
                version (str): the version of the server
        """
        base_name = "%sSession" % self._product_accronym
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="session.java.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix,
                   root_api=self.api_root,
                   name=self._name,
                   api_prefix=self.api_prefix,
                   override_content=override_content,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_name=self._package_name)

    def _write_info(self):
        """ Write API Info file """
        self.write(destination=self.output_directory,
                   filename="SdkInfo.java",
                   template_name="sdkinfo.java.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   product_name=self._product_name,
                   name=self._name,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_name=self._package_name)

    def _write_model(self, specification, specification_set):
        """ Write autogenerate specification file """
        filename = "%s%s.java" % (self._class_prefix, specification.entity_name)

        override_content = self._extract_override_content(specification.entity_name)
        # the API root entity extends a different base class
        superclass_name = "RestRootObject" if specification.rest_name == self.api_root else "RestObject"

        defaults = {}
        section = specification.entity_name
        if self.attrs_defaults.has_section(section):
            for attribute in self.attrs_defaults.options(section):
                defaults[attribute] = self.attrs_defaults.get(section, attribute)

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="model.java.tpl",
                   specification=specification,
                   specification_set=specification_set,
                   version=self.api_version,
                   name=self._name,
                   class_prefix=self._class_prefix,
                   product_accronym=self._product_accronym,
                   override_content=override_content,
                   superclass_name=superclass_name,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_name=self._package_name,
                   attribute_defaults=defaults)

        return (filename, specification.entity_name)

    def _write_fetcher(self, specification, specification_set):
        """ Write fetcher """
        destination = "%s%s" % (self.output_directory, self.fetchers_path)
        base_name = "%sFetcher" % specification.entity_name_plural
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)

        self.write(destination=destination,
                   filename=filename,
                   template_name="fetcher.java.tpl",
                   specification=specification,
                   specification_set=specification_set,
                   class_prefix=self._class_prefix,
                   product_accronym=self._product_accronym,
                   override_content=override_content,
                   header=self.header_content,
                   name=self._name,
                   version_string=self._api_version_string,
                   package_name=self._package_name)

        return (filename, specification.entity_name_plural)

    def _write_build_file(self):
        """ Write Maven build file (pom.xml) """
        self.write(destination=self._base_output_directory,
                   filename="pom.xml",
                   template_name="pom.xml.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   product_name=self._product_name,
                   name=self._name,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_prefix=self._package_prefix,
                   library_version=self.library_version)

    def _extract_override_content(self, name):
        """ Return user-provided override code for `name`, preferring a
            version-specific file over the generic one; None when absent.
        """
        # find override file
        specific_override_path = "%s/%s_%s%s.override.java" % (
            self.override_folder, self.api_version, self._class_prefix, name.title())
        generic_override_path = "%s/%s%s.override.java" % (
            self.override_folder, self._class_prefix, name.title())
        final_path = specific_override_path if os.path.exists(specific_override_path) else generic_override_path

        # Read override from file
        # NOTE(review): the file handle opened here is never explicitly
        # closed — consider a `with` block.
        override_content = None
        if os.path.isfile(final_path):
            override_content = open(final_path).read()

        return override_content

    def _get_package_prefix(self, url):
        """ Build a Java package prefix from a URL's host name, reversed
            (e.g. "api.example.com" -> "com.example.api").
        """
        hostname_parts = self._get_hostname_parts(url)

        package_name = ""
        for index, hostname_part in enumerate(reversed(hostname_parts)):
            package_name = package_name + hostname_part
            if index < len(hostname_parts) - 1:
                package_name = package_name + '.'

        return package_name

    def _get_hostname_parts(self, url):
        """ Split the URL's host name into parts, dropping any "www". """
        # urlparse needs a scheme to locate the hostname
        if url.find("http://") != 0:
            url = "http://" + url

        hostname = urlparse(url).hostname
        hostname_parts = hostname.split('.')

        valid_hostname_parts = []
        for hostname_part in hostname_parts:
            if hostname_part != "www":
                valid_hostname_parts.append(hostname_part)

        return valid_hostname_parts

    def _set_enum_list_local_type(self, specifications):
        """ This method is needed until get_type_name() is enhanced to
            include specification subtype and local_name
        """
        for rest_name, specification in specifications.items():
            for attribute in specification.attributes:
                if attribute.type == "enum":
                    # enum type name is the capitalized attribute name
                    enum_type = attribute.local_name[0:1].upper() + attribute.local_name[1:]
                    attribute.local_type = enum_type
                elif attribute.type == "object":
                    attr_type = "Object"
                    if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
                        # per-entity override from attrs_types.ini
                        type = self.attrs_types.get(specification.entity_name, attribute.local_name)
                        if type:
                            attr_type = type
                    attribute.local_type = attr_type
                elif attribute.type == "list":
                    if attribute.subtype == "enum":
                        enum_subtype = attribute.local_name[0:1].upper() + attribute.local_name[1:]
                        attribute.local_type = "java.util.List<E" + enum_subtype + ">"
                    elif attribute.subtype == "object":
                        attr_subtype = "com.fasterxml.jackson.databind.JsonNode"
                        if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
                            subtype = self.attrs_types.get(specification.entity_name, attribute.local_name)
                            if subtype:
                                attr_subtype = subtype
                        attribute.local_type = "java.util.List<" + attr_subtype + ">"
                    elif attribute.subtype == "entity":
                        attribute.local_type = "java.util.List<com.fasterxml.jackson.databind.JsonNode>"
                    elif attribute.subtype == "string":
                        attribute.local_type = "java.util.List<String>"
                    elif attribute.subtype == "JSON":
                        attribute.local_type = "java.util.List<String>"
                    else:
                        attribute.local_type = "java.util.List<" + attribute.subtype + ">"
class INIConfig(IConfig):
    """``IConfig`` implementation backed by an INI file on disk.

    Values live in a nested ``OrderedDict`` (``self.values``); a config
    section named ``"a:b"`` maps to ``values['a']['b']``, and the special
    ``ROOT`` section holds top-level keys.
    """

    ROOTSECT = 'ROOT'

    def __init__(self, path):
        self.path = path
        self.values = OrderedDict()
        self.config = RawConfigParser()

    def load(self, default=None):
        """Load the file at ``self.path`` into ``self.values``.

        :param default: optional mapping of initial/default values.
            ``None`` means "no defaults".  (BUGFIX: previously a mutable
            ``{}`` default argument, the classic Python pitfall.)
        :returns: the populated ``self.values`` mapping.

        If the file does not exist it is created from the defaults.
        """
        self.values = OrderedDict() if default is None else OrderedDict(default)

        if os.path.exists(self.path):
            logging.debug(u'Loading application configuration file: %s.' % self.path)
            # Python 2's RawConfigParser has no encoding parameter
            if sys.version_info.major < 3:
                self.config.readfp(io.open(self.path, "r", encoding='utf-8'))
            else:
                self.config.read(self.path, encoding='utf-8')
            for section in self.config.sections():
                args = section.split(':')
                if args[0] == self.ROOTSECT:
                    args.pop(0)
                for key, value in self.config.items(section):
                    self.set(*(args + [key, value]))
            # retro compatibility: very old files kept everything in the
            # parser's DEFAULT section instead of a ROOT section
            if len(self.config.sections()) == 0:
                first = True
                for key, value in self.config.items(DEFAULTSECT):
                    if first:
                        logging.warning('The configuration file "%s" uses an old-style' % self.path)
                        logging.warning('Please rename the %s section to %s' % (DEFAULTSECT, self.ROOTSECT))
                        first = False
                    self.set(key, value)
            logging.debug(u'Application configuration file loaded: %s.' % self.path)
        else:
            self.save()
            logging.debug(u'Application configuration file created with default values: %s. '
                          'Please customize it.' % self.path)
        return self.values

    def save(self):
        """Serialize ``self.values`` back to the INI file."""
        # BUGFIX: ``basestring``/``unicode`` only exist on Python 2; resolve
        # them lazily so save() no longer raises NameError on Python 3.
        try:
            scalar_types = (int, Decimal, float, basestring)
            text_type = unicode
        except NameError:  # Python 3
            scalar_types = (int, Decimal, float, str)
            text_type = str

        def save_section(values, root_section=self.ROOTSECT):
            # Recursively mirror the nested dict into flat "a:b" sections.
            for k, v in values.items():
                if isinstance(v, scalar_types):
                    if not self.config.has_section(root_section):
                        self.config.add_section(root_section)
                    self.config.set(root_section, k, text_type(v))
                elif isinstance(v, dict):
                    new_section = ':'.join((root_section, k)) if (root_section != self.ROOTSECT or k == self.ROOTSECT) else k
                    if not self.config.has_section(new_section):
                        self.config.add_section(new_section)
                    save_section(v, new_section)

        save_section(self.values)
        with io.open(self.path, 'w', encoding='utf-8') as f:
            self.config.write(f)

    def get(self, *args, **kwargs):
        """Return the value at the nested key path ``*args``.

        :param default: value returned when any path element is missing.
        """
        default = None
        if 'default' in kwargs:
            default = kwargs['default']

        v = self.values
        for k in args[:-1]:
            if k in v:
                v = v[k]
            else:
                return default
        try:
            return v[args[-1]]
        except KeyError:
            return default

    def set(self, *args):
        """Set ``args[-1]`` as the value at the key path ``args[:-1]``,
        creating intermediate dicts as needed."""
        v = self.values
        for k in args[:-2]:
            if k not in v:
                v[k] = OrderedDict()
            v = v[k]
        v[args[-2]] = args[-1]

    def delete(self, *args):
        """Delete the value at the key path ``*args``; no-op if absent."""
        v = self.values
        for k in args[:-1]:
            if k not in v:
                return
            v = v[k]
        v.pop(args[-1], None)
class OktaAuthConfig():
    """ Config helper class

    Wraps the ``~/.okta-aws`` INI file.  Lookups try the requested profile
    section first, then fall back to the ``default`` section, then (for
    credentials) to an interactive prompt.
    """

    def __init__(self, logger):
        self.logger = logger
        self.config_path = os.path.expanduser('~') + '/.okta-aws'
        self._value = RawConfigParser()
        self._value.read(self.config_path)

    def base_url_for(self, okta_profile):
        """ Gets base URL from config """
        if self._value.has_option(okta_profile, 'base-url'):
            base_url = self._value.get(okta_profile, 'base-url')
            self.logger.info("Authenticating to: %s" % base_url)
        else:
            base_url = self._value.get('default', 'base-url')
            self.logger.info("Using base-url from default profile %s" % base_url)
        return base_url

    def app_link_for(self, okta_profile):
        """ Gets app_link from config """
        app_link = None
        if self._value.has_option(okta_profile, 'app-link'):
            app_link = self._value.get(okta_profile, 'app-link')
        elif self._value.has_option('default', 'app-link'):
            app_link = self._value.get('default', 'app-link')
        self.logger.info("App Link set as: %s" % app_link)
        return app_link

    def username_for(self, okta_profile):
        """ Gets username from config """
        if self._value.has_option(okta_profile, 'username'):
            username = self._value.get(okta_profile, 'username')
        elif self._value.has_option('default', 'username'):
            username = self._value.get('default', 'username')
        else:
            # NOTE(review): this branch was redacted in the source dump;
            # reconstructed as an interactive prompt, mirroring password_for.
            username = input('Enter username: ')
        return username

    def password_for(self, okta_profile):
        """ Gets password from config

        NOTE(review): method body partially redacted in the source dump;
        reconstructed to mirror username_for, using getpass for the prompt.
        """
        if self._value.has_option(okta_profile, 'password'):
            password = self._value.get(okta_profile, 'password')
        elif self._value.has_option('default', 'password'):
            password = self._value.get('default', 'password')
        else:
            password = getpass('Enter password: ')
        return password

    def factor_for(self, okta_profile):
        """ Gets MFA factor from config, or None when not configured """
        if self._value.has_option(okta_profile, 'factor'):
            factor = self._value.get(okta_profile, 'factor')
            self.logger.debug("Setting MFA factor to %s" % factor)
            return factor
        return None

    def save_chosen_role_for_profile(self, okta_profile, role_arn):
        """ Saves the chosen role ARN (and base-url) into the profile """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)

        base_url = self.base_url_for(okta_profile)
        self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'role', role_arn)

        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)

    def save_chosen_app_link_for_profile(self, okta_profile, app_link):
        """ Saves the chosen app link (and base-url) into the profile """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)

        base_url = self.base_url_for(okta_profile)
        self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'app-link', app_link)

        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)
class Session(object):
    """Loads and saves sessions."""

    def __init__(self, api):
        self.api = api
        self.factory = TimelineFactory(api)
        self.sessions_conf = RawConfigParser()
        # The built-in default session: home timeline visible, the usual
        # secondary timelines loaded in the background.
        default_buffers = ', '.join([
            MENTIONS_TIMELINE,
            FAVORITES_TIMELINE,
            MESSAGES_TIMELINE,
            OWN_TWEETS_TIMELINE,
        ])
        self.sessions = {
            DEFAULT_SESSION: {
                VISIBLE: HOME_TIMELINE,
                BUFFERS: default_buffers,
            }
        }

        # First run: persist the default session to disk.
        if not path.isfile(SESSIONS_FILE):
            logging.info(_('Sessions file created'))
            self.init_sessions_file()

    def init_sessions_file(self):
        """Create the `SESSIONS_FILE`."""
        defaults = self.sessions[DEFAULT_SESSION]
        self.sessions_conf.add_section(DEFAULT_SESSION)
        self.sessions_conf.set(DEFAULT_SESSION, VISIBLE, defaults[VISIBLE])
        self.sessions_conf.set(DEFAULT_SESSION, BUFFERS, defaults[BUFFERS])
        # create the file and write the `default` session
        with open(SESSIONS_FILE, 'w') as sessions_fp:
            self.sessions_conf.write(sessions_fp)

    def load_from_session_conf(self, session_name):
        """
        Load the session `session_name` from :attr:session_conf to
        :attr:sessions dictionary.
        """
        # we assume that the `visible` option is present; `buffers` is
        # optional and defaults to no background timelines
        visible = self.sessions_conf.get(session_name, VISIBLE)
        buffers = (self.sessions_conf.get(session_name, BUFFERS)
                   if self.sessions_conf.has_option(session_name, BUFFERS)
                   else '')

        self.sessions[session_name] = {
            VISIBLE: visible,
            BUFFERS: buffers,
        }

    def populate(self, timeline_list, session=None):
        """Populate `timeline_list` with the session timelines."""
        wanted = configuration.session

        # read the `SESSIONS_FILE`
        self.sessions_conf.read(SESSIONS_FILE)

        if self.sessions_conf.has_section(wanted):
            self.load_from_session_conf(wanted)
            chosen = self.sessions[wanted]
        else:
            # `configuration.session` does not exist, load default session
            chosen = self.sessions[DEFAULT_SESSION]

        self.append_visible_timelines(chosen[VISIBLE], timeline_list)
        self.append_background_timelines(chosen[BUFFERS], timeline_list)

    def append_visible_timelines(self, visible_string, timeline_list):
        """"
        Given a `visible_string` with the names of the visible timelines,
        append them to `timeline_list` and make them all visible.
        """
        names = clean_timeline_list_string(visible_string)

        # the first timeline is always visible
        first_name = names.pop(0)
        timeline_list.append_timeline(self.factory(first_name))

        # each further timeline gets its own visible column
        for name in names:
            timeline_list.append_timeline(self.factory(name))
            timeline_list.expand_visible_next()

    def append_background_timelines(self, buffers_string, timeline_list):
        """
        Given a `buffers_string` with the names of the timelines that should
        be loaded in the background, append them to `timeline_list`.
        """
        for name in clean_timeline_list_string(buffers_string):
            timeline_list.append_timeline(self.factory(name))
def __init__(self, filename): config = RawConfigParser() config.read(filename) self.switches = [] self.default_url = Config._get_val(config, 'settings', 'default_url', None) self.interface = Config._get_val(config, 'settings', 'interface', '0.0.0.0') self.ip_filter = Config._get_val(config, 'settings', 'ip_filter', '0.0.0.0/0').split('/') self.ip_filter[0] = struct.unpack('>L', socket.inet_aton( self.ip_filter[0]))[0] if len(self.ip_filter) == 1: self.ip_filter.append(32) elif len(self.ip_filter) == 2: self.ip_filter[1] = int(self.ip_filter[1]) else: raise ConfigError('Bad IP address format specified for IP filter') if config.has_section('switches'): for cfg, url in config.items('switches'): parsed_cfg = dict(h=None, s=None, b=None, k=None, p=None) for param in cfg.lower().split(','): if param in ('on', 'off'): parsed_cfg['p'] = param == 'on' elif param[-1] in parsed_cfg: parsed_cfg[param[-1]] = int(param[:-1]) else: raise ConfigError( 'Unknown parameter %s while parsing %s = %s' % (param[-1], cfg, url)) self.switches.append((parsed_cfg, url)) #special config for specific URLs url_openers = [] for top_level_url in config.sections(): if not top_level_url.startswith( 'http://') and top_level_url.startswith('https://'): continue auth = Config._get_val(config, top_level_url, 'auth', None) if auth == 'basic': username = Config._get_val(config, top_level_url, 'username', None) password = Config._get_val(config, top_level_url, 'password', None) if username is None: raise ConfigError( "'username' parameter is required when using basic HTTP authentication" ) if password is None: raise ConfigError( "'password' parameter is required when using basic HTTP authentication" ) password_mgr = HTTPPasswordMgrWithDefaultRealm() password_mgr.add_password(None, top_level_url, username, password) url_openers.append(HTTPBasicAuthHandler(password_mgr)) install_opener(build_opener(*url_openers))
It also supports gconf like connection, so you get notices when a property has changed. """ import sys import os import atexit from configparser import RawConfigParser from pychess.System.Log import log from pychess.System.prefix import addUserConfigPrefix configParser = RawConfigParser() section = "General" path = addUserConfigPrefix("config") if os.path.isfile(path): configParser.readfp(open(path)) if not configParser.has_section(section): configParser.add_section(section) atexit.register(lambda: configParser.write(open(path, "w"))) idkeyfuncs = {} conid = 0 def notify_add(key, func, *args): """The signature for func must be self, client, *args, **kwargs""" assert isinstance(key, str) global conid idkeyfuncs[conid] = (key, func, args) conid += 1 return conid - 1
class Config(object):

    """This class is used to access/read config file, if it exists.

    :param location: the custom path to search for config file
    :type location: str or None
    """

    def __init__(self, location=None):
        # Optional explicit config path (e.g. from a -C flag); it is
        # searched before any standard location.
        self.location = location
        self.config_filename = 'glances.conf'
        self.parser = RawConfigParser()
        # Path of the file actually loaded; None until load() succeeds.
        self._loaded_config_file = None
        self.load()

    def load(self):
        """Load a config file from the list of paths, if it exists.

        The first existing, non-empty candidate wins; later paths are
        ignored once one file has been read.
        """
        for config_file in self.get_config_paths():
            if os.path.isfile(
                    config_file) and os.path.getsize(config_file) > 0:
                try:
                    if is_py3:
                        self.parser.read(config_file, encoding='utf-8')
                    else:
                        # Python 2's ConfigParser.read() has no
                        # encoding parameter.
                        self.parser.read(config_file)
                    logger.info(
                        "Read configuration file '{0}'".format(config_file))
                except UnicodeDecodeError as e:
                    # A config file that is not valid UTF-8 is fatal.
                    logger.error(
                        "Cannot decode configuration file '{0}': {1}".format(
                            config_file, e))
                    sys.exit(1)
                # Save the loaded configuration file path (issue #374)
                self._loaded_config_file = config_file
                break

    def get_loaded_config_file(self):
        """Return the loaded configuration file"""
        return self._loaded_config_file

    def get_config_paths(self):
        r"""Get a list of config file paths.

        The list is built taking into account of the OS, priority and
        location.

        * running from source: /path/to/glances/conf
        * per-user install: ~/.local/etc/glances (Unix-like only)
        * Linux: ~/.config/glances, /etc/glances
        * BSD: ~/.config/glances, /usr/local/etc/glances
        * Mac: ~/Library/Application Support/glances, /usr/local/etc/glances
        * Windows: %APPDATA%\glances

        The config file will be searched in the following order of priority:

        * /path/to/file (via -C flag)
        * /path/to/glances/conf
        * user's local directory (per-user install settings)
        * user's home directory (per-user settings)
        * {/usr/local,}/etc directory (system-wide settings)
        """
        paths = []
        # Candidate 'conf' directory two levels above this module
        # (present when running from a source checkout).
        conf_path = os.path.realpath(
            os.path.join(work_path, '..', '..', 'conf'))

        if self.location is not None:
            paths.append(self.location)

        if os.path.exists(conf_path):
            paths.append(os.path.join(conf_path, self.config_filename))

        if not is_windows:
            # Per-user install location (pip install --user style).
            paths.append(
                os.path.join(os.path.expanduser('~/.local'), 'etc', appname,
                             self.config_filename))

        if is_linux or is_bsd:
            # XDG config dir, falling back to ~/.config.
            paths.append(
                os.path.join(
                    os.environ.get('XDG_CONFIG_HOME') or
                    os.path.expanduser('~/.config'),
                    appname, self.config_filename))
            # sys.real_prefix is set inside a virtualenv; BSD keeps
            # system-wide config under the install prefix.
            if hasattr(sys, 'real_prefix') or is_bsd:
                paths.append(
                    os.path.join(sys.prefix, 'etc', appname,
                                 self.config_filename))
            else:
                paths.append(
                    os.path.join('/etc', appname, self.config_filename))
        elif is_mac:
            paths.append(
                os.path.join(
                    os.path.expanduser('~/Library/Application Support/'),
                    appname, self.config_filename))
            paths.append(
                os.path.join(sys_prefix, 'etc', appname,
                             self.config_filename))
        elif is_windows:
            # NOTE(review): APPDATA is assumed to be set on Windows;
            # os.path.join would raise if it were missing — confirm.
            paths.append(
                os.path.join(os.environ.get('APPDATA'), appname,
                             self.config_filename))

        return paths

    def items(self, section):
        """Return the items list of a section."""
        return self.parser.items(section)

    def has_section(self, section):
        """Return info about the existence of a section."""
        return self.parser.has_section(section)

    def get_option(self, section, option):
        """Get the float value of an option, if it exists.

        Returns None when the option is missing from the section.
        """
        try:
            value = self.parser.getfloat(section, option)
        except NoOptionError:
            return
        else:
            return value

    def get_raw_option(self, section, option):
        """Get the raw value of an option, if it exists.

        Returns None when the option is missing from the section.
        """
        try:
            value = self.parser.get(section, option)
        except NoOptionError:
            return
        else:
            return value