コード例 #1
0
    def load_rcfile(self):
        """Read settings from the rcfile, if present, into attributes.

        Only attributes still holding their default value are overridden,
        so values set elsewhere (e.g. on the command line) win over the
        rcfile.  Raises ValueError for unparseable boolean values.
        """
        if not os.path.exists(self.RCFILE):
            return

        config = RawConfigParser()
        config.optionxform = lambda x: x.upper()  # rc keys map to UPPER attrs
        config.read(self.RCFILE)

        items = []
        for section in ('global', self.NAME):
            if section is not None and config.has_section(section):
                items.extend(config.items(section))

        for key, raw in items:
            if key not in CONFIG_TYPES:
                continue
            if getattr(self, key) != DEFAULT_SETTINGS[key]:
                # already customised elsewhere; rcfile must not override
                continue
            kind = CONFIG_TYPES[key]
            if kind == 'str':
                parsed = raw
            elif kind == 'int':
                parsed = int(raw)
            elif kind == 'float':
                parsed = float(raw)
            elif kind == 'list':
                parsed = [part.strip() for part in raw.split(",")]
            elif kind == 'bool':
                lowered = raw.lower()
                if lowered in ('1', 'yes', 'true', 'on'):
                    parsed = True
                elif lowered in ('0', 'no', 'false', 'off'):
                    parsed = False
                else:
                    raise ValueError("Not a boolean: %s" % raw)
            else:
                continue
            setattr(self, key, parsed)
コード例 #2
0
def settingsFromFile(infile, defaults):
    """Given a path string :attr:`infile`, load settings and return them as
    dictionary.

    Missing files, missing keys, and invalid values all fall back to
    *defaults*; 'UpdateEvery' must parse as a positive integer and
    'SyncNewCourses' as a boolean.

    Args:
        infile (str): a path to a file
        defaults (dict): a dictionary containing fallback values

    Returns:
        dict: the merged settings (all values are strings)
    """
    config = RawConfigParser()
    config.optionxform = lambda option: option  # keep option-name case
    try:
        with open(infile) as f:
            try:
                config.read_file(f)
            except MissingSectionHeaderError:
                # file has key/value pairs but no [section] header
                config['General'] = defaults
    except OSError:
        # file missing or unreadable: fall back to defaults entirely
        config['General'] = defaults
    if not config.has_section('General'):
        # file was readable but contained no [General] section; without
        # this guard the lookups below would raise KeyError
        config['General'] = {}
    for key in defaults:
        if key not in config['General']:
            config['General'][key] = defaults[key]
    try:
        if int(config['General']['UpdateEvery']) <= 0:
            # non-positive update intervals make no sense; restore default
            config['General']['UpdateEvery'] = defaults['UpdateEvery']
    except ValueError:
        # can't convert to integer
        config['General']['UpdateEvery'] = defaults['UpdateEvery']
    for value in ('SyncNewCourses', ):
        try:
            booleanvalue = config.getboolean('General', value)
            config['General'][value] = str(booleanvalue)
        except ValueError:
            # can't convert to boolean
            config['General'][value] = defaults[value]
    return dict(config['General'])
コード例 #3
0
    def run_generator(self, expect_error=False):
        '''Run sysv-generator.

        Fail if stderr contains any "Fail", unless expect_error is True.
        Return (stderr, filename -> ConfigParser) pair with output to stderr
        and parsed generated units.
        '''
        env = os.environ.copy()
        env['SYSTEMD_LOG_LEVEL'] = 'debug'
        env['SYSTEMD_SYSVINIT_PATH'] = self.init_d_dir
        env['SYSTEMD_SYSVRCND_PATH'] = self.rcnd_dir
        env['SYSTEMD_UNIT_PATH'] = self.unit_dir
        gen = subprocess.Popen(
            [sysv_generator, 'ignored', 'ignored', self.out_dir],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            universal_newlines=True, env=env)
        (out, err) = gen.communicate()
        if not expect_error:
            self.assertFalse('Fail' in err, err)
        self.assertEqual(gen.returncode, 0, err)

        results = {}
        for service in glob(self.out_dir + '/*.service'):
            # symlinks are aliases to real units; only parse regular files
            if os.path.islink(service):
                continue
            cp = RawConfigParser()
            cp.optionxform = lambda o: o  # don't lower-case option names
            with open(service) as f:
                # read_file() replaces readfp(), which was deprecated in
                # Python 3.2 and removed in 3.12
                cp.read_file(f)
            results[os.path.basename(service)] = cp

        return (err, results)
コード例 #4
0
ファイル: config.py プロジェクト: kernt/ploy
 def read_config(self, config):
     """Read *config* and all configs it ``extends``, flattened to tuples.

     ``config`` may be a filesystem path (str/unicode) or a file-like
     object (anything with a ``read`` attribute).  Each option becomes a
     ``(src, path, section, key, value)`` tuple, plus one
     ``(src, path, section, None, None)`` marker per section.  Exits the
     process if a referenced config file does not exist.

     Returns a reversed iterator over the collected tuples; the reversals
     here and at the end appear to arrange entries so that extended (base)
     configs come before the configs that extend them — TODO confirm
     against callers.

     NOTE: Python 2 code (uses the ``unicode`` builtin).
     """
     result = []
     stack = [config]
     while 1:
         config = stack.pop()
         # src is the relative path of the originating file, or None for
         # file-like objects
         src = None
         if isinstance(config, (str, unicode)):
             src = os.path.relpath(config)
         _config = RawConfigParser()
         _config.optionxform = lambda s: s  # keep option names case-sensitive
         if getattr(config, 'read', None) is not None:
             # file-like object: parse directly, use our own path as base
             _config.readfp(config)
             path = self.path
         else:
             if not os.path.exists(config):
                 log.error("Config file '%s' doesn't exist.", config)
                 sys.exit(1)
             _config.read(config)
             path = os.path.dirname(config)
         for section in reversed(_config.sections()):
             for key, value in reversed(_config.items(section)):
                 result.append((src, path, section, key, value))
             result.append((src, path, section, None, None))
         # 'extends' may live in [global] or in [global:global]
         if _config.has_option('global', 'extends'):
             extends = _config.get('global', 'extends').split()
         elif _config.has_option('global:global', 'extends'):
             extends = _config.get('global:global', 'extends').split()
         else:
             break
         # queue extended configs (paths resolved relative to this config)
         # at the front of the stack
         stack[0:0] = [
             os.path.abspath(os.path.join(path, x))
             for x in reversed(extends)]
     return reversed(result)
コード例 #5
0
ファイル: settings.py プロジェクト: dchangtw/flent
    def load_rcfile(self):
        """Parse the rcfile and return the resulting settings dict.

        Falls back to the legacy rcfile location (with a warning) when the
        configured path is still the default and does not exist.  Returns
        an empty dict when no rcfile is found; raises RuntimeError when the
        rcfile contains unparseable values.
        """
        self.process_args()

        rcfile_is_default = self.RCFILE == parser.get_default('RCFILE')
        if rcfile_is_default and not os.path.exists(self.RCFILE) \
           and os.path.exists(OLD_RCFILE):
            logger.warning("Using old rcfile found at %s, "
                           "please rename to %s.",
                           OLD_RCFILE, self.RCFILE)
            self.RCFILE = OLD_RCFILE

        if not os.path.exists(self.RCFILE):
            return {}

        config = RawConfigParser()
        config.optionxform = lambda key: key.upper()  # rc keys are upper-case
        config.read(self.RCFILE)

        items = []
        for section in ('global', self.NAME):
            if section is not None and config.has_section(section):
                items.extend(config.items(section))

        try:
            return self.parse_rcvalues(items)
        except (ValueError, argparse.ArgumentTypeError) as e:
            raise RuntimeError("Unable to parse RC values: %s" % e)
コード例 #6
0
ファイル: config.py プロジェクト: johnbachman/indra
def populate_config_dict(config_path):
    """Load the configuration file into the config_file dictionary

    A ConfigParser-style configuration file can have multiple sections, but
    we ignore the section distinction  and load the key/value pairs from all
    sections into a single key/value list.
    """
    try:
        parser = RawConfigParser()
        parser.optionxform = lambda x: x  # keep keys case-sensitive
        parser.read(config_path)
        config_dict = {}
        for section in parser.sections():
            for option, value in parser.items(section):
                config_dict[option] = str(value)
    except Exception:
        logger.warning("Could not load configuration file due to exception. "
                       "Only environment variable equivalents will be used.")
        return None

    # normalize values: empty strings become None, '~' is expanded
    for key, value in config_dict.items():
        if value == '':
            config_dict[key] = None
        elif isinstance(value, str):
            config_dict[key] = os.path.expanduser(value)
    return config_dict
コード例 #7
0
ファイル: manager.py プロジェクト: jnphilipp/Feedindicator
 def save(self):
     """Write the current options to the config file in app_config_dir."""
     writer = RawConfigParser()
     writer.optionxform = str  # keep option names case-sensitive
     writer['Options'] = self._configs
     config_path = os.path.join(app_config_dir, 'config')
     with open(config_path, 'w', encoding='utf-8') as f:
         writer.write(f)
コード例 #8
0
ファイル: dogtaginstance.py プロジェクト: tiran/freeipa
    def create_spawn_config(self, subsystem_config):
        """Build the pkispawn config for this subsystem.

        Overlays, in order: the default template, *subsystem_config*, the
        ipaca_customize file, and an optional user-supplied override file.
        Keys listed as immutable are verified after the overlays.

        :param subsystem_config: dict of subsystem-specific settings
        :return: RawConfigParser holding only the subsystem section with
            the ``pki_*`` keys
        """
        section_name = self.defaults['pki_subsystem']
        cfgtpl, immutable_keys = self._get_default_config()

        # overwrite CA/KRA config with subsystem settings
        subsystem_config = self._mangle_values(subsystem_config)
        for key, value in subsystem_config.items():
            cfgtpl.set(section_name, key, value)

        # get a mapping of settings that cannot be modified by users
        immutable_settings = {
            k: v for k, v in cfgtpl.items(section_name)
            if k in immutable_keys
        }

        # add ipaca_customize overlay,
        # These are settings that can be modified by a user, too. We use
        # ipaca_customize.ini to set sensible defaults.
        with open(self.ipaca_customize) as f:
            cfgtpl.read_file(f)

        # load external overlay from command line
        if self.pki_config_override is not None:
            with open(self.pki_config_override) as f:
                cfgtpl.read_file(f)

        # verify again
        self._verify_immutable(
            cfgtpl, immutable_settings, self.pki_config_override
        )

        # key backup is not compatible with HSM support
        if (cfgtpl.has_option(section_name, 'pki_hsm_enable') and
                cfgtpl.getboolean(section_name, 'pki_hsm_enable')):
            cfgtpl.set(section_name, 'pki_backup_keys', 'False')
            cfgtpl.set(section_name, 'pki_backup_password', '')

        # point token-related settings (presumably listed in
        # self.token_stanzas) at the configured token name
        pki_token_name = cfgtpl.get(section_name, 'pki_token_name')
        for stanza in self.token_stanzas:
            if cfgtpl.has_option(section_name, stanza):
                cfgtpl.set(section_name, stanza, pki_token_name)

        # Next up, get rid of interpolation variables, DEFAULT,
        # irrelevant sections and unused variables. Only the subsystem
        # section is copied into a new raw config parser. A raw config
        # parser is necessary, because ConfigParser.write() write passwords
        # with '%' in a way, that is not accepted by Dogtag.
        config = RawConfigParser()
        config.optionxform = str
        config.add_section(section_name)
        for key, value in sorted(cfgtpl.items(section=section_name)):
            if key.startswith('pki_'):
                config.set(section_name, key, value)

        return config
コード例 #9
0
ファイル: diff1c.py プロジェクト: Cujoko/diff1c
def get_setting(section, key):
    """Look up *key* in *section* of diff1c.ini (cwd first, then home dir)."""
    ini_path = Path('diff1c.ini')
    if not ini_path.exists():
        ini_path = Path.home() / ini_path
        if not ini_path.exists():
            raise Exception('Файл настроек не существует!')
    config = RawConfigParser()
    config.optionxform = lambda option: option  # preserve key case
    config.read(str(ini_path), 'utf-8')
    return config[section][key]  # fixme
コード例 #10
0
ファイル: camb4py.py プロジェクト: marius311/camb4py
def read_ini(ini):
    """Load an ini file or string into a dictionary.

    Accepts a dict (returned unchanged), a path to an ini file, or the
    contents of an ini file as a string (sections optional).  Option-name
    case is preserved.

    Raises:
        ValueError: if *ini* is neither a dict nor a string.
    """
    if isinstance(ini, dict):
        return ini
    if isinstance(ini, str):
        if os.path.exists(ini):
            # use a context manager so the handle is closed; the old
            # open(ini).read() leaked the file object
            with open(ini) as f:
                ini = f.read()
        config = RawConfigParser()
        config.optionxform = str  # preserve key case
        # prepend a dummy section so section-less ini content parses;
        # read_string() replaces readfp(), removed in Python 3.12
        config.read_string(u'[root]\n' + ini)
        return dict(config.items('root'))
    else:
        raise ValueError('Unexpected type for ini file %s' % type(ini))
コード例 #11
0
def default_settings_file(tmpdir):
    """Write a settings file holding default values; return its path."""
    settings = RawConfigParser()
    settings.optionxform = lambda option: option  # preserve key case
    settings['General'] = {
        'UpdateEvery': '60',
        'RootFolder': 'root',
        'SyncNewCourses': 'False'
    }
    target = tmpdir.mkdir('fold').join('sets.ini')
    with open(str(target), 'w') as setsfile:
        settings.write(setsfile)
    return str(target)
コード例 #12
0
def settingsToFile(insettings, filepath):
    """Given a dict, save to file in the format specified by configparser

    Args:
        insettings (dict): settings to persist under [General]
        filepath (str): destination path; parent dirs are created as needed

    Raises:
        OSError: if the directory cannot be created or the file cannot be
            written.
    """
    config = RawConfigParser()
    config.optionxform = lambda option: option  # keep option-name case
    config['General'] = insettings
    dirpath = os.path.dirname(filepath)
    # dirpath is '' for a bare filename; makedirs('') would raise
    if dirpath:
        try:
            os.makedirs(dirpath, exist_ok=True)
        except OSError:
            # tolerate racy creation failures, but re-raise when the
            # directory genuinely could not be created
            if not os.path.isdir(dirpath):
                raise
    # the write is outside the try: an OSError here (disk full, permission
    # denied) must propagate instead of being silently swallowed
    with open(filepath, 'w') as f:
        config.write(f)
コード例 #13
0
    def load_rcfile(self):
        """Apply settings from the rcfile (if any), then update implications."""
        if os.path.exists(self.RCFILE):
            config = RawConfigParser()
            config.optionxform = lambda key: key.upper()  # rc keys are upper-case
            config.read(self.RCFILE)

            items = []
            for section in ('global', self.NAME):
                if section is not None and config.has_section(section):
                    items.extend(config.items(section))
            self.load_rcvalues(items)
        self.update_implications()
コード例 #14
0
    def __init__(self, monolithe_config, api_info):
        """ Initializes a _JavaSDKAPIVersionFileWriter

        Derives output and Java package locations from the monolithe
        configuration and *api_info*, and loads the attribute-defaults,
        attribute-types and library ini files that drive generation.

        :param monolithe_config: configuration object providing get_option()
        :param api_info: dict with "version", "root" and "prefix" keys
        """
        super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.java")

        self.api_version = api_info["version"]
        self._api_version_string = SDKUtils.get_string_version(self.api_version)
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        self.monolithe_config = monolithe_config
        self._output = self.monolithe_config.get_option("output", "transformer")
        self._name = self.monolithe_config.get_option("name", "transformer")
        self._class_prefix = ""
        self._product_accronym = self.monolithe_config.get_option("product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")
        self._url = self.monolithe_config.get_option("url", "transformer")

        # java package name, e.g. "<prefix>.<name>.<version-string>", and
        # its directory form for the source tree
        self._package_prefix = self._get_package_prefix(self._url)
        self._package_name = self._package_prefix + '.' + self._name + '.' + SDKUtils.get_string_version(self.api_version)
        self._package_subdir = self._package_name.replace('.', '/')

        self._base_output_directory = "%s/java" % (self._output)
        self.output_directory = "%s/src/main/java/%s" % (self._base_output_directory, self._package_subdir)
        self.override_folder = os.path.normpath("%s/__overrides" % self._base_output_directory)
        self.fetchers_path = "/fetchers/"

        # per-attribute default values (optionxform=str keeps key case)
        self.attrs_defaults = RawConfigParser()
        path = "%s/java/__attributes_defaults/attrs_defaults.ini" % self._output
        self.attrs_defaults.optionxform = str
        self.attrs_defaults.read(path)

        # per-attribute type overrides
        self.attrs_types = RawConfigParser()
        path = "%s/java/__attributes_defaults/attrs_types.ini" % self._output
        self.attrs_types.optionxform = str
        self.attrs_types.read(path)

        # library version is read from the section named after the api
        # version in library.ini
        library_info = RawConfigParser()
        path = "%s/java/__attributes_defaults/library.ini" % self._output
        library_info.optionxform = str
        library_info.read(path)
        self.library_version = library_info.get(self.api_version, "libraryVersion")

        # header text prepended to generated files (presumably a license
        # header — see __code_header)
        with open("%s/java/__code_header" % self._output, "r") as f:
            self.header_content = f.read()
コード例 #15
0
ファイル: utils.py プロジェクト: collective/transmogrifier
def load_config(configuration_id, seen=None, **overrides):  # flake8: noqa
    """Load a named transmogrifier configuration into a dict of sections.

    Recursively merges configurations named by the ``include`` option of
    the ``transmogrifier`` section (later includes win), then applies the
    keyword *overrides* on top.

    Args:
        configuration_id: registered configuration name or ``pkg:file``
            package reference.
        seen: internal recursion guard; leave as None.
        **overrides: section -> {key: value} replacements; every section
            and key must already exist.

    Raises:
        ValueError: on recursive include cycles.
    """
    if seen is None:
        seen = []
    if configuration_id in seen:
        raise ValueError(
            'Recursive configuration extends: %s (%r)' % (configuration_id,
                                                          seen))
    seen.append(configuration_id)

    if ':' in configuration_id:
        configuration_file = resolvePackageReference(configuration_id)
    else:
        config_info = configuration_registry.getConfiguration(configuration_id)
        configuration_file = config_info['configuration']

    parser = RawConfigParser()
    parser.optionxform = str  # case sensitive
    # context manager closes the handle (readfp(open(...)) leaked it) and
    # read_file() replaces readfp(), which was removed in Python 3.12
    with open(configuration_file) as fp:
        parser.read_file(fp)

    result = {}
    includes = None
    for section in parser.sections():
        result[section] = dict(parser.items(section))
        if section == 'transmogrifier':
            includes = result[section].pop('include', includes)

    if includes:
        # merge includes in reverse so earlier-listed includes win
        for configuration_id in includes.split()[::-1]:
            include = load_config(configuration_id, seen)
            sections = set(include.keys()) | set(result.keys())
            for section in sections:
                result[section] = update_section(
                    result.get(section, {}), include.get(section, {}))

    seen.pop()

    for section, options in iteritems(overrides):
        assert section in result, \
            'Overrides include non-existing section {0:s}'.format(section)
        for key, value in iteritems(options):
            assert key in result[section], \
                'Overrides include non-existing key {0:s}:{1:s}'.format(
                    section, key)
            result[section][key] = value

    return result
コード例 #16
0
def test_savefileexists(tmpdir, default_settings_dict):
    """Saving over an existing settings file replaces its contents."""
    target = tmpdir.mkdir('fold').join('sets.ini')
    existing = RawConfigParser()
    existing.optionxform = lambda option: option
    existing['General'] = {
        'UpdateEvery': '30',
        'RootFolder': 'anotherroot',
        'SyncNewCourses': 'False'
    }
    with open(str(target), 'w') as setsfile:
        existing.write(setsfile)
    newsettings = {
        'UpdateEvery': '20',
        'RootFolder': 'finalroot',
        'SyncNewCourses': 'True'
    }
    settingsToFile(newsettings, str(target))
    assert settingsFromFile(str(target), default_settings_dict) == newsettings
コード例 #17
0
def test_somepresent_allinvalid(tmpdir, default_settings_dict):
    """Not all values present, all are invalid, file accessible"""
    written = RawConfigParser()
    written.optionxform = lambda option: option
    written['General'] = {
        'UpdateEvery': 'noatanumer',
        'SyncNewCourses': 'False'
    }
    settings_path = tmpdir.mkdir('fold').join('sets.ini')
    with open(str(settings_path), 'w') as setsfile:
        written.write(setsfile)
    loaded = settingsFromFile(str(settings_path), default_settings_dict)
    expected = {
        'UpdateEvery': '60',
        'RootFolder': 'root',
        'SyncNewCourses': 'False'
    }
    assert loaded == expected
コード例 #18
0
def test_notallvaluesarepresent(tmpdir, default_settings_dict):
    """Not all values are present; all are valid, file is accessible"""
    partial = RawConfigParser()
    partial.optionxform = lambda option: option
    partial['General'] = {
        'UpdateEvery': '50',
        'RootFolder': 'custom',
    }
    settings_path = tmpdir.mkdir('fold').join('sets.ini')
    with open(str(settings_path), 'w') as setsfile:
        partial.write(setsfile)
    loaded = settingsFromFile(str(settings_path), default_settings_dict)
    expected = {
        'UpdateEvery': '50',
        'RootFolder': 'custom',
        'SyncNewCourses': 'False'
    }
    assert loaded == expected
コード例 #19
0
def save_entries(fp, entries):
    """Serialize typed (name, value) entries into an ini section on *fp*.

    Entries with falsy values or unrecognized types are skipped; names are
    escaped and prefixed with their type tag.
    """
    parser = RawConfigParser()
    parser.optionxform = str  # keep generated keys exactly as written
    parser.add_section(PERSISTENT_SECTION)

    for name, value in entries:
        if not value:
            continue

        vtype = value.type
        payload = value.value

        if vtype == NT_BOOLEAN:
            key = 'boolean "%s"' % _escape_string(name)
            vrepr = "true" if payload else "false"
        elif vtype == NT_DOUBLE:
            key = 'double "%s"' % _escape_string(name)
            vrepr = str(payload)
        elif vtype == NT_STRING:
            key = 'string "%s"' % _escape_string(name)
            vrepr = '"%s"' % _escape_string(payload)
        elif vtype == NT_RAW:
            key = 'raw "%s"' % _escape_string(name)
            vrepr = base64.b64encode(payload).decode("ascii")
        elif vtype == NT_BOOLEAN_ARRAY:
            key = 'array boolean "%s"' % _escape_string(name)
            vrepr = ",".join("true" if item else "false" for item in payload)
        elif vtype == NT_DOUBLE_ARRAY:
            key = 'array double "%s"' % _escape_string(name)
            vrepr = ",".join(str(item) for item in payload)
        elif vtype == NT_STRING_ARRAY:
            key = 'array string "%s"' % _escape_string(name)
            vrepr = '","'.join(_escape_string(item) for item in payload)
            if vrepr:
                vrepr = '"%s"' % vrepr
        else:
            # unknown value type: skip rather than write garbage
            continue

        parser.set(PERSISTENT_SECTION, key, vrepr)

    parser.write(fp, space_around_delimiters=False)
コード例 #20
0
ファイル: settings.py プロジェクト: dtaht/flent
    def load_rcfile(self):
        """Load rc values, falling back to the legacy rcfile location."""
        rcfile_is_default = self.RCFILE == DEFAULT_SETTINGS['RCFILE']
        if rcfile_is_default and not os.path.exists(self.RCFILE) \
           and os.path.exists(OLD_RCFILE):
            sys.stderr.write("Warning: Old rcfile found at %s, please rename to %s.\n" \
                             % (OLD_RCFILE, self.RCFILE))
            self.RCFILE = OLD_RCFILE

        if os.path.exists(self.RCFILE):
            config = RawConfigParser()
            config.optionxform = lambda key: key.upper()  # rc keys are upper-case
            config.read(self.RCFILE)

            items = []
            for section in ('global', self.NAME):
                if section is not None and config.has_section(section):
                    items.extend(config.items(section))
            self.load_rcvalues(items)
        self.update_implications()
コード例 #21
0
ファイル: core.py プロジェクト: betsman/ctapipe
	def read(self, filenames, impl=FITS, implementation=DATAIMPL,
				encoding=None):
		"""
		Read filename or a list of filenames and parse configuration entries.

		Files that cannot be opened are silently ignored; this is
		designed so that you can specify a list of potential
		configuration file locations (e.g. current directory, user's
		home directory, system wide directory), and all existing
		configuration files in the list will be read.  A single
		filename may also be given.

		Parameters:
		-----------
		filenames: str or list of str
			Full path name or list of full path name containing configuration
			entries to parse
		impl: str , optional
				FITS -> use Fits format
				INI  -> use windows style ini format
		implementation: str , optional
			DATAIMPL -> Use Fits data table
			HEADERIMP -> Use fits header
		encoding: str , optional
			encoding passed through to the ini parser (INI mode only)

		Returns
		-------
		list of successfully read files.
		"""
		if impl == self.INI:
			config_parser = RawConfigParser()

			# keep option names case-sensitive
			config_parser.optionxform = lambda option: option
			success_list = config_parser.read(filenames, encoding)
			self._addOptionFromParser(config_parser)
			return success_list

		elif impl == self.FITS:
			return self._read_fits(filenames, implementation, encoding)
		else:
			print("Format:", impl, 'not allowed', file=sys.stderr)
			return list()
コード例 #22
0
ファイル: sysv-generator-test.py プロジェクト: embe/systemd
    def run_generator(self, expect_error=False):
        '''Run sysv-generator.

        Fail if stderr contains any "Fail", unless expect_error is True.
        Return (stderr, filename -> ConfigParser) pair with output to stderr
        and parsed generated units.
        '''
        env = os.environ.copy()
        env['SYSTEMD_LOG_LEVEL'] = 'debug'
        env['SYSTEMD_LOG_TARGET'] = 'console'
        env['SYSTEMD_SYSVINIT_PATH'] = self.init_d_dir
        env['SYSTEMD_SYSVRCND_PATH'] = self.rcnd_dir
        env['SYSTEMD_UNIT_PATH'] = self.unit_dir
        gen = subprocess.Popen(
            [sysv_generator, 'ignored', 'ignored', self.out_dir],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            universal_newlines=True, env=env)
        (out, err) = gen.communicate()
        if not expect_error:
            self.assertFalse('Fail' in err, err)
        self.assertEqual(gen.returncode, 0, err)

        results = {}
        for service in glob(self.out_dir + '/*.service'):
            # symlinks are aliases to real units; only parse regular files
            if os.path.islink(service):
                continue
            try:
                # for python3 we need here strict=False to parse multiple
                # lines with the same key
                cp = RawConfigParser(dict_type=MultiDict, strict=False)
            except TypeError:
                # RawConfigParser in python2 does not have the strict option
                # but it allows multiple lines with the same key by default
                cp = RawConfigParser(dict_type=MultiDict)
            cp.optionxform = lambda o: o  # don't lower-case option names
            with open(service) as f:
                # prefer read_file(): readfp() was removed in Python 3.12;
                # fall back to readfp() for python2, which lacks read_file()
                if hasattr(cp, 'read_file'):
                    cp.read_file(f)
                else:
                    cp.readfp(f)
            results[os.path.basename(service)] = cp

        return (err, results)
コード例 #23
0
    def load_rcfile(self):
        """Parse the rcfile into a settings dict ({} when no rcfile exists).

        Falls back to the legacy rcfile location (with a warning on stderr)
        when the configured path is still the default and does not exist.
        """
        self.process_args()

        rcfile_is_default = self.RCFILE == parser.get_default('RCFILE')
        if rcfile_is_default and not os.path.exists(self.RCFILE) \
           and os.path.exists(OLD_RCFILE):
            sys.stderr.write("Warning: Using old rcfile found at %s, "
                             "please rename to %s.\n"
                             % (OLD_RCFILE, self.RCFILE))
            self.RCFILE = OLD_RCFILE

        if not os.path.exists(self.RCFILE):
            return {}

        config = RawConfigParser()
        config.optionxform = lambda key: key.upper()  # rc keys are upper-case
        config.read(self.RCFILE)

        items = []
        for section in ('global', self.NAME):
            if section is not None and config.has_section(section):
                items.extend(config.items(section))
        return self.parse_rcvalues(items)
コード例 #24
0
ファイル: main.py プロジェクト: collective/transmogrifier
def get_overrides(arguments):
    """Collect pipeline overrides from an overrides file and CLI arguments.

    Args:
        arguments: docopt-style mapping with '--overrides' (optional path,
            made absolute relative to cwd) and '<pipelines_and_overrides>'
            (candidate "section:name=value" strings; unparseable candidates
            are ignored).

    Returns:
        dict mapping section name -> {option: value}; CLI candidates win
        over the overrides file.
    """
    overrides = {}

    overrides_path = arguments.get('--overrides')
    if overrides_path and not os.path.isabs(overrides_path):
        overrides_path = os.path.join(os.getcwd(), overrides_path)
    if overrides_path:
        parser = RawConfigParser()
        parser.optionxform = str  # case sensitive
        with open(overrides_path) as fp:
            # read_file() replaces readfp(), deprecated since Python 3.2
            # and removed in 3.12
            parser.read_file(fp)
        overrides.update(dict(((section, dict(parser.items(section)))
                               for section in parser.sections())))

    for candidate in arguments.get('<pipelines_and_overrides>'):
        try:
            section, name, value = parse_override(candidate)
        except ValueError:
            continue
        overrides.setdefault(section, {})
        overrides[section][name] = value

    return overrides
コード例 #25
0
ファイル: manager.py プロジェクト: jnphilipp/Feedindicator
 def load(self):
     """Load configurations from file."""
     parser = RawConfigParser()
     parser.optionxform = str  # option names are case-sensitive
     parser.read(os.path.join(app_config_dir, 'config'))
     # option name -> typed getter; attribute names match option names
     readers = {
         'autostart': parser.getboolean,
         'refreshtime': parser.getint,
         'stoptimer': parser.getboolean,
         'items_per_feed': parser.getint,
         'show_notifications': parser.getboolean,
         'show_update_notifications': parser.getboolean,
         'feeds_at_top': parser.getboolean,
         'show_unread_feeds': parser.getboolean,
     }
     for option, reader in readers.items():
         if parser.has_option('Options', option):
             setattr(self, option, reader('Options', option))
コード例 #26
0
ファイル: __init__.py プロジェクト: artemiljin/bumpversion
def main(original_args=None):
    """Bump a version stored in config and version-source files.

    Parses the command line in three argparse passes (logging flags first,
    then version-format options whose defaults come from the config file,
    then the final user-facing parser), reads defaults from the mandatory
    ``.bumpversion.cfg`` plus one version source file (``setup.py``,
    ``plugin.json`` or ``VERSION``), computes the new version, rewrites the
    version source, and writes the updated config back to disk.

    Args:
        original_args: argument list to parse instead of ``sys.argv[1:]``.

    Exits with status 2 when no version source or config file is readable,
    and with status 1 when the version-parse regex does not compile.
    """
    positionals, args = split_args_in_optional_and_positional(
        sys.argv[1:] if original_args is None else original_args)

    if len(positionals[1:]) > 2:
        warnings.warn(
            "Giving multiple files on the command line will be deprecated, please use [bumpversion:file:...] in a config file.",
            PendingDeprecationWarning)

    # Pass 1: only the flags that influence logging, so verbosity is in
    # effect before the config file is read.
    parser1 = argparse.ArgumentParser(add_help=False)

    parser1.add_argument('--verbose',
                         action='count',
                         default=0,
                         help='Print verbose logging to stderr',
                         required=False)

    parser1.add_argument('--list',
                         action='store_true',
                         default=False,
                         help='List machine readable information',
                         required=False)

    known_args, remaining_argv = parser1.parse_known_args(args)

    logformatter = logging.Formatter('%(message)s')

    if len(logger.handlers) == 0:
        ch = logging.StreamHandler(sys.stderr)
        ch.setFormatter(logformatter)
        logger.addHandler(ch)

    if len(logger_list.handlers) == 0:
        ch2 = logging.StreamHandler(sys.stdout)
        ch2.setFormatter(logformatter)
        logger_list.addHandler(ch2)

    if known_args.list:
        logger_list.setLevel(1)

    log_level = {
        0: logging.WARNING,
        1: logging.INFO,
        2: logging.DEBUG,
    }.get(known_args.verbose, logging.DEBUG)

    logger.setLevel(log_level)

    logger.debug("Starting {}".format(DESCRIPTION))

    defaults = {}
    vcs_info = {}

    config = RawConfigParser('')

    # don't transform keys to lowercase (which would be the default)
    config.optionxform = lambda option: option

    config.add_section('bumpversion')

    # We need setup.py to get the major, minor versions
    ver_sources = ['setup.py', 'plugin.json', 'VERSION']
    ver_source = ver_file_check(ver_sources)
    if ver_source is None:
        message = "Could not read any of {} file".format(str(ver_sources))
        logger.error(message)
        sys.exit(2)
    # We don't work with other configuration files except .bumpversion.cfg
    config_file = '.bumpversion.cfg'
    if not os.path.exists(config_file):
        message = "Could not read {} file".format(config_file)
        logger.error(message)
        sys.exit(2)

    part_configs = {}

    files = []

    logger.info("Reading config file {}:".format(config_file))
    logger.info(io.open(config_file, 'rt', encoding='utf-8').read())

    # read_file() replaces readfp(), which was removed in Python 3.12.
    config.read_file(io.open(config_file, 'rt', encoding='utf-8'))

    log_config = StringIO()
    config.write(log_config)

    if 'files' in dict(config.items("bumpversion")):
        warnings.warn(
            "'files =' configuration is will be deprecated, please use [bumpversion:file:...]",
            PendingDeprecationWarning)

    defaults.update(dict(config.items("bumpversion")))

    # Multi-line list options: split on newlines, dropping blank entries.
    for listvaluename in ("serialize", ):
        try:
            value = config.get("bumpversion", listvaluename)
            defaults[listvaluename] = list(
                filter(None, (x.strip() for x in value.splitlines())))
        except NoOptionError:
            pass  # no default value then ;)

    # BUGFIX: this previously iterated ``in "dry_run"`` — i.e. over the
    # characters 'd', 'r', 'y', ... — so the dry_run option in the config
    # file was never read (every per-character lookup raised NoOptionError,
    # which was silently swallowed below).
    for boolvaluename in ("dry_run", ):
        try:
            defaults[boolvaluename] = config.getboolean(
                "bumpversion", boolvaluename)
        except NoOptionError:
            pass  # no default value then ;)

    # Collect per-part and per-file configuration sections.
    for section_name in config.sections():

        section_name_match = re.compile("^bumpversion:(file|part):(.+)").match(
            section_name)

        if not section_name_match:
            continue

        section_prefix, section_value = section_name_match.groups()

        section_config = dict(config.items(section_name))

        if section_prefix == "part":

            ThisVersionPartConfiguration = NumericVersionPartConfiguration

            if 'values' in section_config:
                section_config['values'] = list(
                    filter(None,
                           (x.strip()
                            for x in section_config['values'].splitlines())))
                ThisVersionPartConfiguration = ConfiguredVersionPartConfiguration

            part_configs[section_value] = ThisVersionPartConfiguration(
                **section_config)

        elif section_prefix == "file":

            filename = section_value

            if 'serialize' in section_config:
                section_config['serialize'] = list(
                    filter(
                        None,
                        (x.strip()
                         for x in section_config['serialize'].splitlines())))

            section_config['part_configs'] = part_configs

            if 'parse' not in section_config:
                section_config['parse'] = defaults.get(
                    "parse", r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)')

            if 'serialize' not in section_config:
                section_config['serialize'] = defaults.get(
                    'serialize', [str('{major}.{minor}.{patch}')])

            if 'search' not in section_config:
                section_config['search'] = defaults.get(
                    "search", '{current_version}')

            if 'replace' not in section_config:
                section_config['replace'] = defaults.get(
                    "replace", '{new_version}')

            files.append(
                ConfiguredFile(filename, VersionConfig(**section_config)))

    # Pass 2: version-format options whose defaults come from the config.
    parser2 = argparse.ArgumentParser(prog='bumpversion',
                                      add_help=False,
                                      parents=[parser1])
    parser2.set_defaults(**defaults)

    parser2.add_argument('--parse',
                         metavar='REGEX',
                         help='Regex parsing the version string',
                         default=defaults.get(
                             "parse",
                             r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'))
    parser2.add_argument('--serialize',
                         metavar='FORMAT',
                         action=DiscardDefaultIfSpecifiedAppendAction,
                         help='How to format what is parsed back to a version',
                         default=defaults.get(
                             "serialize", [str('{major}.{minor}.{patch}')]))
    parser2.add_argument('--search',
                         metavar='SEARCH',
                         help='Template for complete string to search',
                         default=defaults.get("search", '{current_version}'))
    parser2.add_argument('--replace',
                         metavar='REPLACE',
                         help='Template for complete string to replace',
                         default=defaults.get("replace", '{new_version}'))

    known_args, remaining_argv = parser2.parse_known_args(args)

    defaults.update(vars(known_args))

    assert isinstance(known_args.serialize, list)

    context = dict(
        list(time_context.items()) + list(prefixed_environ().items()) +
        list(vcs_info.items()))

    try:
        vc = VersionConfig(
            parse=known_args.parse,
            serialize=known_args.serialize,
            search=known_args.search,
            replace=known_args.replace,
            part_configs=part_configs,
        )
    except sre_constants.error as e:
        sys.exit(1)

    current_version = vc.parse(
        known_args.current_version) if known_args.current_version else None
    leave_config_ver = True
    new_version = None
    if len(positionals) > 0:
        # Compare the version in the version-source file against the config's
        # current_version; if they disagree on any part other than the one
        # being bumped, the version-source file wins (leave_config_ver False).
        setup_version, zero_patch_setup_version = ConfiguredFile(
            ver_source, vc).find(positionals[0])
        compare = setup_version.compare(vc.order(), current_version)
        for part in compare:
            if part == positionals[0]:
                continue
            else:
                leave_config_ver = leave_config_ver and compare[part]

        try:
            if leave_config_ver and current_version:
                logger.info("Attempting to increment part '{}'".format(
                    positionals[0]))
                new_version = current_version.bump(positionals[0], vc.order())
                logger.info("Values are now: " +
                            keyvaluestring(new_version._values))
                defaults['new_version'] = vc.serialize(new_version, context)
            elif not leave_config_ver:
                logger.info("Using Version from {}".format(ver_source))
                defaults['new_version'] = vc.serialize(
                    zero_patch_setup_version, context)
                new_version = zero_patch_setup_version
                logger.info("Values are now: " +
                            keyvaluestring(setup_version._values))
        except MissingValueForSerializationException as e:
            logger.info("Opportunistic finding of new_version failed: " +
                        e.message)
        except IncompleteVersionRepresenationException as e:
            logger.info("Opportunistic finding of new_version failed: " +
                        e.message)
        except KeyError as e:
            logger.info("Opportunistic finding of new_version failed")

    # Pass 3: the final user-facing parser (adds help and positionals).
    parser3 = argparse.ArgumentParser(
        prog='bumpversion',
        description=DESCRIPTION,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        conflict_handler='resolve',
        parents=[parser2],
    )

    parser3.set_defaults(**defaults)

    parser3.add_argument('--dry-run',
                         '-n',
                         action='store_true',
                         default=False,
                         help="Don't write any files, just pretend.")

    file_names = []
    if 'files' in defaults:
        assert defaults['files'] is not None
        file_names = defaults['files'].split(' ')

    parser3.add_argument('part', help='Part of the version to be bumped.')
    parser3.add_argument('files',
                         metavar='file',
                         nargs='*',
                         help='Files to change',
                         default=file_names)
    args = parser3.parse_args(remaining_argv + positionals)

    if args.dry_run:
        logger.info("Dry run active, won't touch any files.")

    # make sure files exist and contain version string
    # NOTE(review): setup_version is only bound inside the
    # ``len(positionals) > 0`` branch above. parser3's required 'part'
    # positional normally guarantees positionals is non-empty, but confirm
    # no path reaches here without it.
    logger.info("Update info in {}".format(ver_source))
    ConfiguredFile(ver_source, vc).replace(setup_version, new_version, context,
                                           args.dry_run)
    config.set('bumpversion', 'new_version', args.new_version)

    for key, value in config.items('bumpversion'):
        logger_list.info("{}={}".format(key, value))

    # new_version is only logged above, never persisted; current_version is
    # what gets written back.
    config.remove_option('bumpversion', 'new_version')

    config.set('bumpversion', 'current_version', args.new_version)

    new_config = StringIO()

    try:
        write_to_config_file = not args.dry_run

        logger.info("{} to config file {}:".format(
            "Would write" if not write_to_config_file else "Writing",
            config_file,
        ))

        config.write(new_config)
        logger.info(new_config.getvalue())

        if write_to_config_file:
            with io.open(config_file, 'wb') as f:
                f.write(new_config.getvalue().encode('utf-8'))

    except UnicodeEncodeError:
        warnings.warn(
            "Unable to write UTF-8 to config file, because of an old configparser version. "
            "Update with `pip install --upgrade configparser`.")
コード例 #27
0
ファイル: setup.py プロジェクト: wichert/guess_language
def load_config(file="setup.cfg"):
    """Read *file* into a :class:`RawConfigParser` and return it.

    Option names are normalized: lower-cased with underscores replaced
    by dashes. A missing file yields an empty parser (``read`` ignores
    unreadable paths).
    """
    parser = RawConfigParser()
    parser.optionxform = lambda name: name.lower().replace("_", "-")
    parser.read(file)
    return parser
コード例 #28
0
def main(original_args=None):
    """Bump a version: read config, parse args, rewrite files, commit/tag.

    The command line is parsed in three argparse passes: (1) flags that
    affect logging and config-file selection, (2) version-format options
    whose defaults come from the config file, (3) the final user-facing
    parser including positionals. Version strings are then replaced in the
    configured files, the config file is updated, and the change is
    optionally committed and tagged in the detected VCS.

    Args:
        original_args: argument list to parse instead of ``sys.argv[1:]``.
    """

    positionals, args = split_args_in_optional_and_positional(
        sys.argv[1:] if original_args is None else original_args)

    if len(positionals[1:]) > 2:
        warnings.warn(
            "Giving multiple files on the command line will be deprecated, please use [bumpversion:file:...] in a config file.",
            PendingDeprecationWarning)

    # Pass 1: flags that must take effect before the config file is read
    # (verbosity, list mode, config-file path, dirty-tree override).
    parser1 = argparse.ArgumentParser(add_help=False)

    parser1.add_argument(
        '--config-file',
        metavar='FILE',
        default=argparse.SUPPRESS,
        required=False,
        help=
        'Config file to read most of the variables from (default: .bumpversion.cfg)'
    )

    parser1.add_argument('--verbose',
                         action='count',
                         default=0,
                         help='Print verbose logging to stderr',
                         required=False)

    parser1.add_argument('--list',
                         action='store_true',
                         default=False,
                         help='List machine readable information',
                         required=False)

    parser1.add_argument('--allow-dirty',
                         action='store_true',
                         default=False,
                         help="Don't abort if working directory is dirty",
                         required=False)

    known_args, remaining_argv = parser1.parse_known_args(args)

    logformatter = logging.Formatter('%(message)s')

    # Attach handlers only once so repeated main() calls don't duplicate output.
    if len(logger.handlers) == 0:
        ch = logging.StreamHandler(sys.stderr)
        ch.setFormatter(logformatter)
        logger.addHandler(ch)

    if len(logger_list.handlers) == 0:
        ch2 = logging.StreamHandler(sys.stdout)
        ch2.setFormatter(logformatter)
        logger_list.addHandler(ch2)

    if known_args.list:
        logger_list.setLevel(1)

    log_level = {
        0: logging.WARNING,
        1: logging.INFO,
        2: logging.DEBUG,
    }.get(known_args.verbose, logging.DEBUG)

    logger.setLevel(log_level)

    logger.debug("Starting {}".format(DESCRIPTION))

    defaults = {}
    vcs_info = {}

    # Seed current_version from the latest VCS tag when available.
    for vcs in VCS:
        if vcs.is_usable():
            vcs_info.update(vcs.latest_tag_info())

    if 'current_version' in vcs_info:
        defaults['current_version'] = vcs_info['current_version']

    config = RawConfigParser('')

    # don't transform keys to lowercase (which would be the default)
    config.optionxform = lambda option: option

    config.add_section('bumpversion')

    # argparse.SUPPRESS means the attribute only exists if --config-file was given.
    explicit_config = hasattr(known_args, 'config_file')

    if explicit_config:
        config_file = known_args.config_file
    elif not os.path.exists('.bumpversion.cfg') and \
            os.path.exists('setup.cfg'):
        config_file = 'setup.cfg'
    else:
        config_file = '.bumpversion.cfg'

    config_file_exists = os.path.exists(config_file)

    part_configs = {}

    files = []

    if config_file_exists:

        logger.info("Reading config file {}:".format(config_file))
        logger.info(io.open(config_file, 'rt', encoding='utf-8').read())

        # NOTE(review): readfp() was removed in Python 3.12; read_file() is
        # the replacement — confirm which interpreter versions are supported.
        config.readfp(io.open(config_file, 'rt', encoding='utf-8'))

        log_config = StringIO()
        config.write(log_config)

        if 'files' in dict(config.items("bumpversion")):
            warnings.warn(
                "'files =' configuration is will be deprecated, please use [bumpversion:file:...]",
                PendingDeprecationWarning)

        defaults.update(dict(config.items("bumpversion")))

        # Multi-line list options: split on newlines, dropping blanks.
        for listvaluename in ("serialize", ):
            try:
                value = config.get("bumpversion", listvaluename)
                defaults[listvaluename] = list(
                    filter(None, (x.strip() for x in value.splitlines())))
            except NoOptionError:
                pass  # no default value then ;)

        for boolvaluename in ("commit", "tag", "dry_run"):
            try:
                defaults[boolvaluename] = config.getboolean(
                    "bumpversion", boolvaluename)
            except NoOptionError:
                pass  # no default value then ;)

        # Collect per-part and per-file configuration sections.
        for section_name in config.sections():

            section_name_match = re.compile(
                "^bumpversion:(file|part):(.+)").match(section_name)

            if not section_name_match:
                continue

            section_prefix, section_value = section_name_match.groups()

            section_config = dict(config.items(section_name))

            if section_prefix == "part":

                ThisVersionPartConfiguration = NumericVersionPartConfiguration

                if 'values' in section_config:
                    section_config['values'] = list(
                        filter(
                            None,
                            (x.strip()
                             for x in section_config['values'].splitlines())))
                    ThisVersionPartConfiguration = ConfiguredVersionPartConfiguration

                part_configs[section_value] = ThisVersionPartConfiguration(
                    **section_config)

            elif section_prefix == "file":

                filename = section_value

                if 'serialize' in section_config:
                    section_config['serialize'] = list(
                        filter(
                            None,
                            (x.strip()
                             for x in section_config['serialize'].splitlines()
                             )))

                section_config['part_configs'] = part_configs

                # Fall back to the top-level [bumpversion] settings, then to
                # the built-in major.minor.patch defaults.
                if not 'parse' in section_config:
                    section_config['parse'] = defaults.get(
                        "parse",
                        '(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)')

                if not 'serialize' in section_config:
                    section_config['serialize'] = defaults.get(
                        'serialize', [str('{major}.{minor}.{patch}')])

                if not 'search' in section_config:
                    section_config['search'] = defaults.get(
                        "search", '{current_version}')

                if not 'replace' in section_config:
                    section_config['replace'] = defaults.get(
                        "replace", '{new_version}')

                files.append(
                    ConfiguredFile(filename, VersionConfig(**section_config)))

    else:
        message = "Could not read config file at {}".format(config_file)
        if explicit_config:
            raise argparse.ArgumentTypeError(message)
        else:
            logger.info(message)

    # Pass 2: version-format options whose defaults come from the config.
    parser2 = argparse.ArgumentParser(prog='bumpversion',
                                      add_help=False,
                                      parents=[parser1])
    parser2.set_defaults(**defaults)

    parser2.add_argument('--current-version',
                         metavar='VERSION',
                         help='Version that needs to be updated',
                         required=False)
    parser2.add_argument('--parse',
                         metavar='REGEX',
                         help='Regex parsing the version string',
                         default=defaults.get(
                             "parse",
                             '(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'))
    parser2.add_argument('--serialize',
                         metavar='FORMAT',
                         action=DiscardDefaultIfSpecifiedAppendAction,
                         help='How to format what is parsed back to a version',
                         default=defaults.get(
                             "serialize", [str('{major}.{minor}.{patch}')]))
    parser2.add_argument('--search',
                         metavar='SEARCH',
                         help='Template for complete string to search',
                         default=defaults.get("search", '{current_version}'))
    parser2.add_argument('--replace',
                         metavar='REPLACE',
                         help='Template for complete string to replace',
                         default=defaults.get("replace", '{new_version}'))

    known_args, remaining_argv = parser2.parse_known_args(args)

    defaults.update(vars(known_args))

    assert type(known_args.serialize) == list

    # Template context for serialize/search/replace/commit message rendering.
    context = dict(
        list(time_context.items()) + list(prefixed_environ().items()) +
        list(vcs_info.items()))

    try:
        vc = VersionConfig(
            parse=known_args.parse,
            serialize=known_args.serialize,
            search=known_args.search,
            replace=known_args.replace,
            part_configs=part_configs,
        )
    except sre_constants.error as e:
        # Invalid --parse regex: abort without a traceback.
        sys.exit(1)

    current_version = vc.parse(
        known_args.current_version) if known_args.current_version else None

    new_version = None

    # Opportunistically compute new_version so --new-version can be optional;
    # failures here are non-fatal because the user may still pass it.
    if not 'new_version' in defaults and known_args.current_version:
        try:
            if current_version and len(positionals) > 0:
                logger.info("Attempting to increment part '{}'".format(
                    positionals[0]))
                new_version = current_version.bump(positionals[0], vc.order())
                logger.info("Values are now: " +
                            keyvaluestring(new_version._values))
                defaults['new_version'] = vc.serialize(new_version, context)
        except MissingValueForSerializationException as e:
            logger.info("Opportunistic finding of new_version failed: " +
                        e.message)
        except IncompleteVersionRepresenationException as e:
            logger.info("Opportunistic finding of new_version failed: " +
                        e.message)
        except KeyError as e:
            logger.info("Opportunistic finding of new_version failed")

    # Pass 3: the final user-facing parser (adds help and positionals).
    parser3 = argparse.ArgumentParser(
        prog='bumpversion',
        description=DESCRIPTION,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        conflict_handler='resolve',
        parents=[parser2],
    )

    parser3.set_defaults(**defaults)

    parser3.add_argument('--current-version',
                         metavar='VERSION',
                         help='Version that needs to be updated',
                         required=not 'current_version' in defaults)
    parser3.add_argument('--dry-run',
                         '-n',
                         action='store_true',
                         default=False,
                         help="Don't write any files, just pretend.")
    parser3.add_argument('--new-version',
                         metavar='VERSION',
                         help='New version that should be in the files',
                         required=not 'new_version' in defaults)

    commitgroup = parser3.add_mutually_exclusive_group()

    commitgroup.add_argument('--commit',
                             action='store_true',
                             dest="commit",
                             help='Commit to version control',
                             default=defaults.get("commit", False))
    commitgroup.add_argument('--no-commit',
                             action='store_false',
                             dest="commit",
                             help='Do not commit to version control',
                             default=argparse.SUPPRESS)

    taggroup = parser3.add_mutually_exclusive_group()

    taggroup.add_argument('--tag',
                          action='store_true',
                          dest="tag",
                          default=defaults.get("tag", False),
                          help='Create a tag in version control')
    taggroup.add_argument('--no-tag',
                          action='store_false',
                          dest="tag",
                          help='Do not create a tag in version control',
                          default=argparse.SUPPRESS)

    signtagsgroup = parser3.add_mutually_exclusive_group()
    signtagsgroup.add_argument('--sign-tags',
                               action='store_true',
                               dest="sign_tags",
                               help='Sign tags if created',
                               default=defaults.get("sign_tags", False))
    signtagsgroup.add_argument('--no-sign-tags',
                               action='store_false',
                               dest="sign_tags",
                               help='Do not sign tags if created',
                               default=argparse.SUPPRESS)

    parser3.add_argument('--tag-name',
                         metavar='TAG_NAME',
                         help='Tag name (only works with --tag)',
                         default=defaults.get('tag_name', 'v{new_version}'))

    parser3.add_argument(
        '--tag-message',
        metavar='TAG_MESSAGE',
        dest='tag_message',
        help='Tag message',
        default=defaults.get(
            'tag_message', 'Bump version: {current_version} → {new_version}'))

    parser3.add_argument(
        '--message',
        '-m',
        metavar='COMMIT_MSG',
        help='Commit message',
        default=defaults.get(
            'message', 'Bump version: {current_version} → {new_version}'))

    # Legacy 'files =' config value: space-separated list of file names.
    file_names = []
    if 'files' in defaults:
        assert defaults['files'] != None
        file_names = defaults['files'].split(' ')

    parser3.add_argument('part', help='Part of the version to be bumped.')
    parser3.add_argument('files',
                         metavar='file',
                         nargs='*',
                         help='Files to change',
                         default=file_names)

    args = parser3.parse_args(remaining_argv + positionals)

    if args.dry_run:
        logger.info("Dry run active, won't touch any files.")

    if args.new_version:
        new_version = vc.parse(args.new_version)

    logger.info("New version will be '{}'".format(args.new_version))

    # Positionals after the part name are additional files to change.
    file_names = file_names or positionals[1:]

    for file_name in file_names:
        files.append(ConfiguredFile(file_name, vc))

    # Pick the first usable VCS; refuse a dirty tree unless --allow-dirty.
    for vcs in VCS:
        if vcs.is_usable():
            try:
                vcs.assert_nondirty()
            except WorkingDirectoryIsDirtyException as e:
                if not defaults['allow_dirty']:
                    logger.warn(
                        "{}\n\nUse --allow-dirty to override this if you know what you're doing."
                        .format(e.message))
                    raise
            break
        else:
            # Unusable candidate: clear the loop variable so a fully
            # unusable VCS list leaves vcs as None after the loop.
            vcs = None

    # make sure files exist and contain version string

    logger.info("Asserting files {} contain the version string:".format(
        ", ".join([str(f) for f in files])))

    for f in files:
        f.should_contain_version(current_version, context)

    # change version string in files
    for f in files:
        f.replace(current_version, new_version, context, args.dry_run)

    commit_files = [f.path for f in files]

    # new_version is only set temporarily for the --list output below;
    # current_version is what gets persisted.
    config.set('bumpversion', 'new_version', args.new_version)

    for key, value in config.items('bumpversion'):
        logger_list.info("{}={}".format(key, value))

    config.remove_option('bumpversion', 'new_version')

    config.set('bumpversion', 'current_version', args.new_version)

    new_config = StringIO()

    try:
        write_to_config_file = (not args.dry_run) and config_file_exists

        logger.info("{} to config file {}:".format(
            "Would write" if not write_to_config_file else "Writing",
            config_file,
        ))

        config.write(new_config)
        logger.info(new_config.getvalue())

        if write_to_config_file:
            with io.open(config_file, 'wb') as f:
                f.write(new_config.getvalue().encode('utf-8'))

    except UnicodeEncodeError:
        warnings.warn(
            "Unable to write UTF-8 to config file, because of an old configparser version. "
            "Update with `pip install --upgrade configparser`.")

    if config_file_exists:
        commit_files.append(config_file)

    if not vcs:
        return

    assert vcs.is_usable(), "Did find '{}' unusable, unable to commit.".format(
        vcs.__name__)

    do_commit = (not args.dry_run) and args.commit
    do_tag = (not args.dry_run) and args.tag

    logger.info("{} {} commit".format(
        "Would prepare" if not do_commit else "Preparing",
        vcs.__name__,
    ))

    for path in commit_files:
        logger.info("{} changes in file '{}' to {}".format(
            "Would add" if not do_commit else "Adding",
            path,
            vcs.__name__,
        ))

        if do_commit:
            vcs.add_path(path)

    # Context for rendering the commit message and tag templates.
    vcs_context = {
        "current_version": args.current_version,
        "new_version": args.new_version,
    }
    vcs_context.update(time_context)
    vcs_context.update(prefixed_environ())

    commit_message = args.message.format(**vcs_context)

    logger.info("{} to {} with message '{}'".format(
        "Would commit" if not do_commit else "Committing",
        vcs.__name__,
        commit_message,
    ))

    if do_commit:
        vcs.commit(message=commit_message)

    sign_tags = args.sign_tags
    tag_name = args.tag_name.format(**vcs_context)
    tag_message = args.tag_message.format(**vcs_context)
    logger.info("{} '{}' {} in {} and {}".format(
        "Would tag" if not do_tag else "Tagging", tag_name,
        "with message '{}'".format(tag_message)
        if tag_message else "without message", vcs.__name__,
        "signing" if sign_tags else "not signing"))

    if do_tag:
        vcs.tag(sign_tags, tag_name, tag_message)
コード例 #29
0
ファイル: eds.py プロジェクト: christiansandberg/canopen
def import_eds(source, node_id):
    """Parse an EDS file into an ``objectdictionary.ObjectDictionary``.

    :param source:
        Either a path to an EDS file or a file-like object (anything with a
        ``read`` attribute is used directly).
    :param node_id:
        Node ID forwarded to :func:`build_variable` when constructing entries.
    :return:
        The populated object dictionary.
    """
    eds = RawConfigParser()
    eds.optionxform = str  # keep option names case-sensitive
    if hasattr(source, "read"):
        fp = source
    else:
        fp = open(source)
    try:
        try:
            # Python 3
            eds.read_file(fp)
        except AttributeError:
            # Python 2
            eds.readfp(fp)
    finally:
        # BUG FIX: close the handle even when parsing raises; previously a
        # parse error leaked the open file.
        fp.close()
    od = objectdictionary.ObjectDictionary()

    if eds.has_section("FileInfo"):
        od.__edsFileInfo = {
            opt: eds.get("FileInfo", opt)
            for opt in eds.options("FileInfo")
        }

    if eds.has_section("Comments"):
        # The section declares its own line count; base-0 int() accepts
        # decimal, hex (0x...) and octal notations.
        linecount = int(eds.get("Comments", "Lines"), 0)
        od.comments = '\n'.join([
            eds.get("Comments", "Line%i" % line)
            for line in range(1, linecount + 1)
        ])

    if not eds.has_section("DeviceInfo"):
        # BUG FIX: logger.warn() is deprecated; use warning().
        logger.warning(
            "eds file does not have a DeviceInfo section. This section is mandatory"
        )
    else:
        for rate in [10, 20, 50, 125, 250, 500, 800, 1000]:
            baudPossible = int(
                eds.get("DeviceInfo", "BaudRate_%i" % rate, fallback='0'), 0)
            if baudPossible != 0:
                od.device_information.allowed_baudrates.add(rate * 1000)

        # Map DeviceInfo options onto od.device_information attributes,
        # converting each to its declared type.
        for t, eprop, odprop in [
            (str, "VendorName", "vendor_name"),
            (int, "VendorNumber", "vendor_number"),
            (str, "ProductName", "product_name"),
            (int, "ProductNumber", "product_number"),
            (int, "RevisionNumber", "revision_number"),
            (str, "OrderCode", "order_code"),
            (bool, "SimpleBootUpMaster", "simple_boot_up_master"),
            (bool, "SimpleBootUpSlave", "simple_boot_up_slave"),
            (bool, "Granularity", "granularity"),
            (bool, "DynamicChannelsSupported", "dynamic_channels_supported"),
            (bool, "GroupMessaging", "group_messaging"),
            (int, "NrOfRXPDO", "nr_of_RXPDO"),
            (int, "NrOfTXPDO", "nr_of_TXPDO"),
            (bool, "LSS_Supported", "LSS_supported"),
        ]:
            try:
                if t in (int, bool):
                    setattr(od.device_information, odprop,
                            t(int(eds.get("DeviceInfo", eprop), 0)))
                elif t is str:
                    setattr(od.device_information, odprop,
                            eds.get("DeviceInfo", eprop))
            except NoOptionError:
                # Optional entries are simply skipped.
                pass

    if eds.has_section("DeviceComissioning"):
        od.bitrate = int(eds.get("DeviceComissioning", "BaudRate")) * 1000
        od.node_id = int(eds.get("DeviceComissioning", "NodeID"), 0)

    for section in eds.sections():
        # Match dummy definitions
        match = re.match(r"^[Dd]ummy[Uu]sage$", section)
        if match is not None:
            for i in range(1, 8):
                key = "Dummy%04d" % i
                if eds.getint(section, key) == 1:
                    var = objectdictionary.Variable(key, i, 0)
                    var.data_type = i
                    var.access_type = "const"
                    od.add_object(var)

        # Match indexes
        match = re.match(r"^[0-9A-Fa-f]{4}$", section)
        if match is not None:
            index = int(section, 16)
            name = eds.get(section, "ParameterName")
            try:
                object_type = int(eds.get(section, "ObjectType"), 0)
            except NoOptionError:
                # DS306 4.6.3.2 object description
                # If the keyword ObjectType is missing, this is regarded as
                # "ObjectType=0x7" (=VAR).
                object_type = VAR
            try:
                storage_location = eds.get(section, "StorageLocation")
            except NoOptionError:
                storage_location = None

            if object_type in (VAR, DOMAIN):
                var = build_variable(eds, section, node_id, index)
                od.add_object(var)
            elif object_type == ARR and eds.has_option(section,
                                                       "CompactSubObj"):
                # Compact arrays get an implicit subindex 0 entry counter.
                arr = objectdictionary.Array(name, index)
                last_subindex = objectdictionary.Variable(
                    "Number of entries", index, 0)
                last_subindex.data_type = objectdictionary.UNSIGNED8
                arr.add_member(last_subindex)
                arr.add_member(build_variable(eds, section, node_id, index, 1))
                arr.storage_location = storage_location
                od.add_object(arr)
            elif object_type == ARR:
                arr = objectdictionary.Array(name, index)
                arr.storage_location = storage_location
                od.add_object(arr)
            elif object_type == RECORD:
                record = objectdictionary.Record(name, index)
                record.storage_location = storage_location
                od.add_object(record)

            continue

        # Match subindexes
        match = re.match(r"^([0-9A-Fa-f]{4})[S|s]ub([0-9A-Fa-f]+)$", section)
        if match is not None:
            index = int(match.group(1), 16)
            subindex = int(match.group(2), 16)
            entry = od[index]
            if isinstance(entry,
                          (objectdictionary.Record, objectdictionary.Array)):
                var = build_variable(eds, section, node_id, index, subindex)
                entry.add_member(var)

        # Match [index]Name
        match = re.match(r"^([0-9A-Fa-f]{4})Name", section)
        if match is not None:
            index = int(match.group(1), 16)
            num_of_entries = int(eds.get(section, "NrOfEntries"))
            entry = od[index]
            # For CompactSubObj index 1 is were we find the variable
            src_var = od[index][1]
            for subindex in range(1, num_of_entries + 1):
                var = copy_variable(eds, section, subindex, src_var)
                if var is not None:
                    entry.add_member(var)

    return od
コード例 #30
0
ファイル: units.py プロジェクト: tahayasardemir/OpenMDAO
    does nothing, resulting in a case-sensitive parser.

    Parameters
    ----------
    string : str
        The string to be transformed for the ConfigParser

    Returns
    -------
    str
        The same string that was given as a parameter.
    """
    return string


_UNIT_LIB.optionxform = _do_nothing


def import_library(libfilepointer):
    """
    Import a units library, replacing any existing definitions.

    Parameters
    ----------
    libfilepointer : file
        new library file to work with

    Returns
    -------
    ConfigParser
        newly updated units library for the module
コード例 #31
0
        "calexp": ["instrument", "detector", "visit"],
        "raw": ["instrument", "detector", "exposure"],
        "deepCoadd": ["band", "skymap", "tract", "patch"]
    },
    "hsc_rc2": {
        "IMG_REPO_ROOT": "/project/hsc/gen3repo/rc2w38_ssw42",
        "IMG_DEFAULT_COLLECTION": "RC2/w_2020_38",
        "IMG_OBSCORE_DB": "lsstdb1",
        "IMG_SCHEMA_TABLE": "imgserv.obscore",
        "IMG_DEFAULT_FILTER": "r",
        "calexp": ["instrument", "detector", "visit"],
        "raw": ["instrument", "detector", "exposure"],
        "deepCoadd": ["band", "skymap", "tract", "patch"]
    },
    "default": "hsc_rc2"
}

# Load the webserv.ini configuration.
tasks_parser = RawConfigParser()
tasks_parser.optionxform = str  # preserve option-name case

defaults_file = os.environ.get("WEBSERV_CONFIG", "~/.lsst/webserv.ini")
try:
    config_path = os.path.expanduser(defaults_file)
    with open(config_path) as cfg:
        tasks_parser.read_file(cfg, source=defaults_file)
        # Expose the [webserv] section as a plain dictionary.
        webserv_config = dict(tasks_parser.items("webserv"))
except FileNotFoundError:
    # No config file on disk: fall back to an empty settings mapping.
    webserv_config = {}
コード例 #32
0
def load_entries(fp, filename, prefix):
    """Parse persistent NT entries from *fp* and return them as Value pairs.

    Each option key in the persistent section encodes the entry's type
    (boolean, double, string, raw, or an array of those); the value text is
    decoded accordingly.  Entries whose unescaped key does not start with
    *prefix* are discarded.

    :param fp: open file-like object to parse
    :param filename: source name passed to the parser (used in error output)
    :param prefix: required key prefix for returned entries
    :return: list of ``(key, Value)`` tuples
    :raises IOError: on read/parse failure or if the persistent section is
        missing
    """

    entries = []

    parser = RawConfigParser()
    parser.optionxform = str  # keys are case-sensitive

    try:
        if hasattr(parser, "read_file"):
            parser.read_file(fp, filename)
        else:
            # Fallback for parsers that predate read_file().
            parser.readfp(fp, filename)
    except IOError:
        raise
    except Exception as e:
        raise IOError("Error reading persistent file: %s" % e)

    try:
        items = parser.items(PERSISTENT_SECTION)
    except NoSectionError:
        raise IOError("Persistent section not found")

    value = None
    m = None

    for k, v in items:

        # Flush the value decoded by the previous iteration
        # (reduces code duplication across the type branches below).
        if value:
            key = _unescape_string(m.group(1))
            if key.startswith(prefix):
                entries.append((key, value))

        value = None

        m = _key_bool.match(k)
        if m:
            if v == "true":
                value = Value.makeBoolean(True)
            elif v == "false":
                value = Value.makeBoolean(False)
            else:
                logger.warning("Unrecognized boolean value %r for %s", v, m.group(1))
            continue

        m = _key_double.match(k)
        if m:
            try:
                value = Value.makeDouble(float(v))
            except ValueError:
                logger.warning("Unrecognized double value %r for %s", v, m.group(1))

            continue

        m = _key_string.match(k)
        if m:
            mm = _value_string.match(v)

            if mm:
                value = Value.makeString(_unescape_string(mm.group(1)))
            else:
                logger.warning("Unrecognized string value %r for %s", v, m.group(1))
            continue

        m = _key_raw.match(k)
        if m:
            try:
                v = base64.b64decode(v, validate=True)
                value = Value.makeRaw(v)
            except binascii.Error:
                logger.warning("Unrecognized raw value %r for %s", v, m.group(1))
            continue

        m = _key_bool_array.match(k)
        if m:
            bools = []
            arr = v.strip().split(",")
            if arr != [""]:
                for vv in arr:
                    vv = vv.strip()
                    if vv == "true":
                        bools.append(True)
                    elif vv == "false":
                        bools.append(False)
                    else:
                        # BUG FIX: stray trailing quote removed from message.
                        logger.warning(
                            "Unrecognized bool '%s' in bool array %s", vv, m.group(1)
                        )
                        bools = None
                        break

            if bools is not None:
                value = Value.makeBooleanArray(bools)
            continue

        m = _key_double_array.match(k)
        if m:
            doubles = []
            arr = v.strip().split(",")
            if arr != [""]:
                for vv in arr:
                    try:
                        doubles.append(float(vv))
                    except ValueError:
                        logger.warning(
                            "Unrecognized double '%s' in double array %s",
                            vv,
                            m.group(1),
                        )
                        doubles = None
                        break

            # BUG FIX: mirror the bool-array branch -- do not construct a
            # Value from a failed (None) parse.
            if doubles is not None:
                value = Value.makeDoubleArray(doubles)
            continue

        m = _key_string_array.match(k)
        if m:
            # Technically, this will let invalid inputs in... but,
            # I don't really care. Feel free to fix it if you do.
            strings = [_unescape_string(vv) for vv in _value_string.findall(v)]
            value = Value.makeStringArray(strings)
            continue

        logger.warning("Unrecognized type '%s'", k)

    # Flush the final decoded value after the loop ends.
    if value:
        key = _unescape_string(m.group(1))
        if key.startswith(prefix):
            entries.append((key, value))

    return entries
コード例 #33
0
    def write_acserver_config(self, preset):
        """Render *preset* into ``server_cfg.ini`` for the AC dedicated server.

        The whole configuration is assembled in memory first and only then
        written, so a failure while reading the preset no longer leaves an
        open handle or a half-written/empty file behind.

        :param preset: preset model providing server, session, track and
            weather settings
        """
        config = RawConfigParser()
        config.optionxform = str  # keep the upper-case AC key names verbatim
        config.add_section('SERVER')

        # build a distinct list of car names
        car_list = []
        for driver in preset.entry_set.all():
            if driver.car.dirname not in car_list:
                car_list.append(driver.car.dirname)

        # set max_clients value to the track's pitbox value if null
        if not preset.max_clients:
            preset.max_clients = preset.track.pitboxes

        config.set('SERVER', 'NAME', preset.server_setting.name)
        config.set('SERVER', 'CARS', ';'.join(car_list))
        config.set('SERVER', 'CONFIG_TRACK', xstr(preset.track.subversion))
        config.set('SERVER', 'TRACK', preset.track.dirname)
        config.set('SERVER', 'SUN_ANGLE',
                   time_to_sun_angle(preset.time_of_day))
        config.set('SERVER', 'PASSWORD', str(preset.session_password))
        config.set('SERVER', 'ADMIN_PASSWORD',
                   str(preset.server_setting.admin_password))
        config.set('SERVER', 'UDP_PORT', str(preset.server_setting.udp_port))
        config.set('SERVER', 'TCP_PORT', str(preset.server_setting.tcp_port))
        config.set('SERVER', 'HTTP_PORT', str(preset.server_setting.http_port))
        config.set('SERVER', 'PICKUP_MODE_ENABLED',
                   str(int(preset.pickup_mode_enabled)))
        config.set('SERVER', 'LOOP_MODE', str(int(preset.loop_mode)))
        config.set('SERVER', 'SLEEP_TIME', '1')
        config.set('SERVER', 'CLIENT_SEND_INTERVAL',
                   str(preset.server_setting.client_send_interval))
        config.set('SERVER', 'SEND_BUFFER_SIZE',
                   str(preset.server_setting.send_buffer_size))
        config.set('SERVER', 'RECV_BUFFER_SIZE',
                   str(preset.server_setting.recv_buffer_size))
        config.set('SERVER', 'RACE_OVER_TIME', str(preset.race_over_time))
        config.set('SERVER', 'KICK_QUORUM', str(preset.kick_quorum))
        config.set('SERVER', 'VOTING_QUORUM', str(preset.voting_quorum))
        config.set('SERVER', 'VOTE_DURATION', str(preset.vote_duration))
        config.set('SERVER', 'BLACKLIST_MODE', str(preset.blacklist_mode))
        config.set('SERVER', 'FUEL_RATE', str(preset.fuel_rate))
        config.set('SERVER', 'DAMAGE_MULTIPLIER',
                   str(preset.damage_multiplier))
        config.set('SERVER', 'TYRE_WEAR_RATE', str(preset.tyre_wear_rate))
        config.set('SERVER', 'ALLOWED_TYRES_OUT',
                   str(preset.allowed_tyres_out))
        config.set('SERVER', 'ABS_ALLOWED', str(preset.abs_allowed))
        config.set('SERVER', 'TC_ALLOWED', str(preset.tc_allowed))
        config.set('SERVER', 'STABILITY_ALLOWED',
                   str(int(preset.stability_allowed)))
        config.set('SERVER', 'AUTOCLUTCH_ALLOWED',
                   str(int(preset.autoclutch_allowed)))
        config.set('SERVER', 'TYRE_BLANKETS_ALLOWED',
                   str(int(preset.tyre_blankets_allowed)))
        config.set('SERVER', 'FORCE_VIRTUAL_MIRROR',
                   str(int(preset.force_virtual_mirror)))
        config.set('SERVER', 'REGISTER_TO_LOBBY', '1')
        config.set('SERVER', 'MAX_CLIENTS', str(preset.max_clients))
        config.set('SERVER', 'UDP_PLUGIN_LOCAL_PORT', '11000')
        config.set('SERVER', 'UDP_PLUGIN_ADDRESS', '127.0.0.1:12000')
        config.set(
            'SERVER', 'AUTH_PLUGIN_ADDRESS',
            '127.0.0.1:50041/acauth?timeout=300&andurl1=www.minorating.com%3A805/minodata/auth/'
            + preset.server_setting.minorating_grade + '/')
        config.set('SERVER', 'LEGAL_TYRES', 'V;E;HR;ST')
        config.set('SERVER', 'START_RULE', str(preset.start_rule))
        config.set('SERVER', 'QUALIFY_MAX_WAIT_PERC',
                   str(preset.qualify_max_wait_perc))

        if preset.server_setting.welcome_message:
            config.set(
                'SERVER', 'WELCOME_MESSAGE',
                str(
                    os.path.join(self.acserver_config_dir,
                                 'welcome_message.txt')))

        if preset.practice_time != 0:
            config.add_section('PRACTICE')
            config.set('PRACTICE', 'NAME', 'Free Practice')
            config.set('PRACTICE', 'TIME', str(preset.practice_time))
            config.set('PRACTICE', 'IS_OPEN',
                       str(int(preset.practice_is_open)))

        if preset.qualify_time != 0:
            config.add_section('QUALIFY')
            config.set('QUALIFY', 'NAME', 'Qualify')
            config.set('QUALIFY', 'TIME', str(preset.qualify_time))
            config.set('QUALIFY', 'IS_OPEN', str(int(preset.qualify_is_open)))

        if preset.race_laps != 0:
            config.add_section('RACE')
            config.set('RACE', 'NAME', 'Race')
            config.set('RACE', 'LAPS', str(preset.race_laps))
            config.set('RACE', 'WAIT_TIME', str(preset.race_wait_time))
            # BUG FIX: cast through int() like PRACTICE/QUALIFY so the value
            # renders as 0/1 rather than True/False.
            config.set('RACE', 'IS_OPEN', str(int(preset.race_is_open)))

        if preset.track_dynamism:
            config.add_section('DYNAMIC_TRACK')
            config.set('DYNAMIC_TRACK', 'SESSION_START',
                       str(preset.track_dynamism.session_start))
            config.set('DYNAMIC_TRACK', 'RANDOMNESS',
                       str(preset.track_dynamism.randomness))
            config.set('DYNAMIC_TRACK', 'SESSION_TRANSFER',
                       str(preset.track_dynamism.session_transfer))
            config.set('DYNAMIC_TRACK', 'LAP_GAIN',
                       str(preset.track_dynamism.lap_gain))

        # One numbered WEATHER_<n> section per configured weather slot.
        weather_count = 0
        for weather in preset.weathers.all():
            weather_section = 'WEATHER_' + str(weather_count)
            config.add_section(weather_section)
            config.set(weather_section, 'GRAPHICS', weather.graphics)
            config.set(weather_section, 'BASE_TEMPERATURE_AMBIENT',
                       str(weather.base_temperature_ambient))
            config.set(weather_section, 'VARIATION_AMBIENT',
                       str(weather.variation_ambient))
            config.set(weather_section, 'BASE_TEMPERATURE_ROAD',
                       str(weather.base_temperature_road))
            config.set(weather_section, 'VARIATION_ROAD',
                       str(weather.variation_road))
            weather_count += 1

        # BUG FIX: use a context manager so the handle is always closed,
        # even if config.write() raises.
        with open(os.path.join(self.acserver_config_dir,
                               'server_cfg.ini'), 'w') as cfg_file:
            config.write(cfg_file, space_around_delimiters=False)
コード例 #34
0
ファイル: __init__.py プロジェクト: orages/atcgen
 def get_info_from_ini(ini_path):
     """Read the info section of *ini_path* and return it as a plain dict."""
     ini_parser = RawConfigParser()
     # Identity transform keeps option names case-sensitive
     # (leading upper-case letters must survive).
     ini_parser.optionxform = str
     ini_parser.read(ini_path)
     section = ini_parser[InfoInstruction.ini_section_name]
     return dict(section)
コード例 #35
0
def load_config(file="setup.cfg"):
    """Read *file* and return a parser with normalized option names.

    Option names are lower-cased and underscores become hyphens, matching
    the setup.cfg convention.
    """
    def _normalize(option):
        # e.g. "Install_Requires" -> "install-requires"
        return option.lower().replace("_", "-")

    parser = RawConfigParser()
    parser.optionxform = _normalize
    parser.read(file)
    return parser
コード例 #36
0
ファイル: settings.py プロジェクト: powerswitch/TIMA
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os

from configparser import RawConfigParser

# Repository root: two directory levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Deployment-specific values are kept out of the code in TIMA/settings.ini.
config = RawConfigParser()
config.optionxform = str  # keep option names case-sensitive (e.g. SECRET_KEY)
config.read(BASE_DIR + '/TIMA/settings.ini')

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('secrets','SECRET_KEY')
SESSION_COOKIE_SECURE = config.getboolean('secrets','SESSION_COOKIE_SECURE')
CSRF_COOKIE_SECURE = config.getboolean('secrets','CSRF_COOKIE_SECURE')
SESSION_EXPIRE_AT_BROWSER_CLOSE = config.getboolean('secrets','SESSION_EXPIRE_AT_BROWSER_CLOSE')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config.getboolean('debug','DEBUG')
TEMPLATE_DEBUG = config.getboolean('debug','TEMPLATE_DEBUG')
コード例 #37
0
class UsefulConfigParser(object):
    """A config parser that sucks less than those in module `ConfigParser`."""

    def __init__(self, filenames_to_try=None):
        """
        :param filenames_to_try: a filename, or list of filenames, queued for
            a later :meth:`read`.  Defaults to no files.
        """
        # FUN FACT:  In Python 3.2, they spontaneously changed the behaviour of
        # RawConfigParser so that it no longer considers ';' a comment delimiter
        # for inline comments (previously the behaviour matched
        # comment_prefixes=('#',';') and inline_comment_prefixes=(';',)).
        # See https://docs.python.org/3/library/configparser.html#customizing-parser-behaviour
        # so we restore the old inline-';' behaviour explicitly on Python 3.
        if sys.version_info.major >= 3:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict,
                                       inline_comment_prefixes=(';', ))
        else:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict)

        # BUG FIX: no mutable default argument; None means "no files yet".
        if filenames_to_try is None:
            filenames_to_try = []
        elif isinstance(filenames_to_try, str):
            filenames_to_try = [filenames_to_try]
        self._filenames_to_try = list(filenames_to_try)

    def read(self, filenames_to_try=None):
        """Read all queued filenames plus any given here.

        Returns whatever ``RawConfigParser.read`` returns (the list of files
        successfully parsed).
        """
        # BUG FIX: no mutable default argument here either.
        if filenames_to_try is None:
            filenames_to_try = []
        elif isinstance(filenames_to_try, str):
            filenames_to_try = [filenames_to_try]
        self._filenames_to_try.extend(filenames_to_try)
        return self._cp.read(self._filenames_to_try)

    def sections(self):
        """Return the list of section names."""
        return self._cp.sections()

    def options(self, section_name):
        ## The client code doesn't need to check in advance that the requested
        ## section name is present in the config; this function will check
        ## this automatically, so no exception is raised by RawConfigParser.

        ## Check that `section_name` is present in the config.
        ## Otherwise, RawConfigParser will raise ConfigParser.NoSectionError.
        if not self._cp.has_section(section_name):
            return []
        return self._cp.options(section_name)

    def get(self, section_name, option_name, do_optionxform=True):
        """Return ALL values for *option_name* as a list.

        *section_name* may be a single section name, a list of names, or
        None (meaning: search every section).
        """
        if do_optionxform:
            # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.optionxform
            option_name = self._cp.optionxform(option_name)

        if section_name is None:
            return self._get_optval_in_sections(self.sections(), option_name)
        elif isinstance(section_name, str):
            return self._get_optval_in_sections([section_name], option_name)
        else:
            return self._get_optval_in_sections(section_name, option_name)

    def _get_optval_in_sections(self, section_names, option_name):
        ## The client code doesn't need to check in advance that the requested
        ## section name(s) are present in the config; this function will check
        ## this automatically, so no exception is raised by RawConfigParser.
        optvals = []
        for section_name in section_names:
            ## Check that `section_name` is present in the config.
            ## Otherwise, RawConfigParser will raise ConfigParser.NoSectionError.
            if not self._cp.has_section(section_name):
                continue

            optvals.extend([
                optval for optname, optval in self._cp.items(section_name)
                if optname == option_name
            ])
        return optvals

    def getboolean(self, section_name, option_name, do_optionxform=True):
        """Like :meth:`get`, but coerce every matched value to a bool."""
        # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.getboolean
        return [
            self._coerce_to_boolean(optval)
            for optval in self.get(section_name, option_name, do_optionxform)
        ]

    # Accepted spellings for boolean option values (checked case-insensitively).
    _boolean_states = {
        '1': True,
        'yes': True,
        'true': True,
        'on': True,
        '0': False,
        'no': False,
        'false': False,
        'off': False
    }

    def _coerce_to_boolean(self, optval_str):
        # 'The accepted values for the option are "1", "yes", "true", and "on",
        # which cause this method to return True, and "0", "no", "false", and
        # "off", which cause it to return False. These string values are checked
        # in a case-insensitive manner. Any other value will cause it to raise
        # ValueError.'
        # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.getboolean
        ovs_lower = optval_str.lower()
        if ovs_lower not in self._boolean_states:
            raise ValueError("Not a boolean: %s" % optval_str)
        return self._boolean_states[ovs_lower]
0
ACCEPT_TYPES = ["application/json", "text/html"]

log.basicConfig(format='%(asctime)s %(name)s %(levelname)s: %(message)s',
                datefmt='%m/%d/%Y %I:%M:%S',
                level=log.DEBUG)

defaults_file = os.environ.get("WEBSERV_CONFIG", "~/.lsst/webserv.ini")
WERKZEUG_PREFIX = "dax.webserv.werkzeug."

# instance folder not under version control
i_path = os.path.join(os.path.expanduser("~"), ".lsst/instance")
app = Flask(__name__, instance_path=i_path)

# Initialize configuration
webserv_parser = RawConfigParser()
webserv_parser.optionxform = str  # keep option names case-sensitive

with open(os.path.expanduser(defaults_file)) as cfg:
    # BUG FIX: readfp() is deprecated (and removed in Python 3.12);
    # read_file() is the supported replacement.
    webserv_parser.read_file(cfg, source=defaults_file)

# Expose the [webserv] section settings as a plain dictionary.
webserv_config = dict(webserv_parser.items("webserv"))
default_db_url = webserv_config.get("dax.webserv.db.url")
default_imgserv_meta_url = webserv_config.get("dax.imgserv.meta.url")

# Initialize configuration for ImageServ
imgserv_config_path = os.path.join(app.instance_path, "imgserv")
with app.app_context():
    # imgserv_config_path only prep for use of instance folder later
    is_api_v1.load_imgserv_config(None, default_imgserv_meta_url)

# Execute this last, we can overwrite anything we don't like
コード例 #39
0
# NOTE: this overwrites existing desktop.ini
#
from PIL import Image
import sys
from configparser import RawConfigParser
import ctypes
import os
#-- Vars
f = sys.argv[1]					# path of the dragged file (non-images fail on Image.open below)
i = Image.open(f)				# go ahead and load it as an image (otherwise just close like said above)
# BUG FIX: splitext keeps the whole stem, so "my.photo.jpg" -> "my.photo.ico"
# instead of the old split('.')[0] behaviour ("my.ico").
name = os.path.splitext(f)[0] + '.ico'
ico = i.save(name)				# convert and save the file to the same folder the the dragged image came from
dir = os.path.dirname(f)		# get the dir, not the file
#-- Desktop.ini generator
config = RawConfigParser()
config.optionxform = str  # keep key case exactly as written below

# Create the ini sections we want [.ShellClassInfo], [ViewState]
config.add_section('.ShellClassInfo')
config.add_section('ViewState')

# Icon Path, autogenerated from the source of the dragged object
config.set('.ShellClassInfo','IconResource',name+',0')
# Folder type, should be Video, Generic, Pictures
config.set('ViewState','FolderType','Generic')
# BUG FIX: write via a context manager so the handle is closed even if
# config.write() raises.
with open(dir+'\\desktop.ini','w') as cfFile:
    config.write(cfFile)
コード例 #40
0
    with open(sys_cfg_file, 'w') as f:
        json.dump(cfg, f, indent=4)


#################################################################

# Project Configuration

#################################################################

# Project-level configuration state; no project is loaded yet.
proj_path = None
proj_cfg = RawConfigParser(allow_no_value=True)
# Pre-create the two definition sections the project config always carries.
proj_cfg.read_dict({'ROI_DEFS': {}, 'STIM_DEFS': {}})

# Keep option names case-sensitive for everything written from here on.
proj_cfg.optionxform = str
special = {}
df_refs = {}


def save_proj_config():
    """Refresh the special entries and persist proj_cfg to <proj_path>/config.cfg."""
    set_proj_special()
    config_path = proj_path + '/config.cfg'
    with open(config_path, 'w') as configfile:
        proj_cfg.write(configfile)


def create_new_proj_config():
    defaultInclude = ['SampleID', 'date', 'comments']
    proj_cfg['INCLUDE'] = dict.fromkeys(defaultInclude)

    defaultExclude = ['ImgInfoPath', 'ImgPath', 'ImgUUID', 'ROI_State', 'uuid_curve', 'misc', 'AnimalID']
コード例 #41
0
ファイル: settings.py プロジェクト: klaertus/Secret_hat
from configparser import RawConfigParser
import os
from multiprocessing import Process
from time import sleep

"""
Settings application
"""
# Reset mode to finish line 178
# Hardware and network handles used by the settings application.
sense = SenseHat()
wireless = Wireless()

config = RawConfigParser(allow_no_value=True)
password_config = RawConfigParser(allow_no_value=True)
# Keep option names case-sensitive.  (BUG FIX: this was assigned twice --
# `str` immediately overwritten by an equivalent identity lambda; the dead
# store is removed.)
config.optionxform = str
# NOTE(review): password_config keeps the default lower-casing optionxform --
# confirm that asymmetry is intended.

config.read('config.ini')
password_config.read('password_config.ini')




def main(color1, color2, speed):

    """
    Args :
        color1 (list or tuple) : rgb color code of titles
        color2 (list or tuple) : rgb color code of subtitles
        speed (float) : speed of the scrolling of message
コード例 #42
0
ファイル: _display.py プロジェクト: plasmon360/ModelicaRes
    def find(self, dimension):
        """Return the display unit for a particular dimension.

        **Parameters:**

        - *dimension*: Dictionary of base dimensions and exponents

        **Returns:** The display unit as a string
        """
        parts = []
        for base, exp in dimension.items():
            unit = self[base][1]
            # An exponent of 1 is implicit; otherwise it is appended to the
            # unit symbol (e.g. exponent 2 -> "m2", exponent -1 -> "s-1").
            parts.append(unit if exp == 1 else unit + str(exp))
        return '.'.join(parts)


# Load the default display units.
try:
    # Python 3 configparser: disable interpolation and allow ';' inline
    # comments in display.ini.
    config = RawConfigParser(interpolation=None, inline_comment_prefixes=[';'])
except TypeError:
    # Older configparser without those keyword arguments.
    config = RawConfigParser()
config.optionxform = str  # Dimensions are case sensitive.
config.read(path.join(dname, 'display.ini'))
default_display_units = DisplayUnits(config.items('Default display units'))

if __name__ == '__main__':
    # Test the contents of this file.

    import doctest

    # Run all doctest examples embedded in this module's docstrings.
    doctest.testmod()
コード例 #43
0
# working with xfce4-terminal-0.6.2-1

if __name__ == "__main__":

    import sys

    # The single argument selects the direction: "+" grows, "-" shrinks.
    PM = ["+", "-"]
    if len(sys.argv) != 2 or sys.argv[1] not in PM:
        sys.exit("Usage {0} {1}".format(sys.argv[0], "/".join(PM)))

    delta = 1 if sys.argv[1] == "+" else -1

    import os
    import re
    from configparser import RawConfigParser

    # Locate and parse the terminal configuration file.
    rc_path = os.path.join(os.environ["XDG_CONFIG_HOME"], "xfce4", "terminal",
                           "terminalrc")
    parser = RawConfigParser()
    parser.optionxform = str  # make keys case-sensitive
    parser.read(rc_path)

    # FontName looks like "<face> <size>"; split it, bump the size.
    fn_match = re.search(r'^(.*) (\d+)$',
                         parser['Configuration']['FontName'])
    face, old_size = fn_match.group(1, 2)
    new_size = int(old_size) + delta
    parser['Configuration']['FontName'] = "{0} {1}".format(face, new_size)

    with open(rc_path, 'w') as out:
        parser.write(out)
コード例 #44
0
    def load(cls, hostsConf):
        """ Load several hosts from a configuration file.
        Return a dictionary with hostName -> hostConfig pairs.
        """
        # Read from users' config file. Raw to avoid interpolation of %: we expect %_
        cp = RawConfigParser(comment_prefixes=";")
        cp.optionxform = str  # keep case (stackoverflow.com/questions/1611799)
        hosts = OrderedDict()

        try:
            # BUG FIX: explicit check instead of `assert`, which is stripped
            # when Python runs with -O; the error is still caught below.
            if cp.read(hostsConf) == []:
                raise ValueError('Missing file %s' % hostsConf)

            for hostName in cp.sections():
                host = HostConfig(label=hostName, hostName=hostName)
                host.setHostPath(pw.Config.SCIPION_USER_DATA)

                # Helper functions (to write less)
                def get(var, default=None):
                    if cp.has_option(hostName, var):

                        value = cp.get(hostName, var)
                        # Rescue python2.7 behaviour: ## at the beginning of a line, means a single #.
                        # https://github.com/scipion-em/scipion-pyworkflow/issues/70
                        value = value.replace("\n##", "\n#")

                        # Keep compatibility: %_ --> %%
                        value = value.replace('%_(', '%(')

                        return value
                    else:
                        return default

                def getDict(var):
                    od = OrderedDict()

                    if cp.has_option(hostName, var):
                        for key, value in json.loads(get(var)).items():
                            od[key] = value

                    return od

                host.setScipionHome(
                    get(pw.SCIPION_HOME_VAR, pw.Config.SCIPION_HOME))
                host.setScipionConfig(pw.Config.SCIPION_CONFIG)
                # Read the address of the remote hosts,
                # using 'localhost' as default for backward compatibility
                host.setAddress(get('ADDRESS', 'localhost'))
                host.mpiCommand.set(get('PARALLEL_COMMAND'))
                host.queueSystem = QueueSystemConfig()
                hostQueue = host.queueSystem  # shortcut
                hostQueue.name.set(get('NAME'))

                # If the NAME is not provided or empty
                # do no try to parse the rest of Queue parameters
                if hostQueue.hasName():
                    hostQueue.setMandatory(get('MANDATORY', 0))
                    hostQueue.submitPrefix.set(get('SUBMIT_PREFIX', ''))
                    hostQueue.submitCommand.set(get('SUBMIT_COMMAND'))
                    hostQueue.submitTemplate.set(get('SUBMIT_TEMPLATE'))
                    hostQueue.cancelCommand.set(get('CANCEL_COMMAND'))
                    hostQueue.checkCommand.set(get('CHECK_COMMAND'))
                    hostQueue.jobDoneRegex.set(get('JOB_DONE_REGEX'))
                    hostQueue.queues = getDict('QUEUES')
                    hostQueue.queuesDefault = getDict('QUEUES_DEFAULT')

                hosts[hostName] = host

            return hosts
        except Exception as e:
            sys.exit('Failed to read settings. The reported error was:\n  %s\n'
                     'To solve it, delete %s and run again.' %
                     (e, os.path.abspath(hostsConf)))
コード例 #45
0
ファイル: units.py プロジェクト: tahayasardemir/OpenMDAO
def import_library(libfilepointer):
    """
    Import a units library, replacing any existing definitions.

    Parameters
    ----------
    libfilepointer : file
        new library file to work with

    Returns
    -------
    ConfigParser
        newly updated units library for the module
    """
    global _UNIT_LIB
    global _UNIT_CACHE
    _UNIT_CACHE = {}
    _UNIT_LIB = ConfigParser()
    # Keep option names case-sensitive (unit symbols like 'mV' vs 'MV' differ).
    _UNIT_LIB.optionxform = _do_nothing

    # New in Python 3.2: read_file() replaces readfp().
    if sys.version_info >= (3, 2):
        _UNIT_LIB.read_file(libfilepointer)
    else:
        _UNIT_LIB.readfp(libfilepointer)

    required_base_types = ['length', 'mass', 'time', 'temperature', 'angle']
    _UNIT_LIB.base_names = list()
    # used by is_angle() and other base type checking
    _UNIT_LIB.base_types = dict()
    _UNIT_LIB.unit_table = dict()
    _UNIT_LIB.prefixes = dict()
    _UNIT_LIB.help = list()

    for prefix, factor in _UNIT_LIB.items('prefixes'):
        # Anything after a comma is an inline comment; keep only the number.
        _UNIT_LIB.prefixes[prefix] = float(factor.partition(',')[0])

    base_list = [0] * len(_UNIT_LIB.items('base_units'))

    for i, (unit_type, name) in enumerate(_UNIT_LIB.items('base_units')):
        _UNIT_LIB.base_types[unit_type] = i
        # Power vector with a 1 in this base type's slot, 0 elsewhere.
        powers = list(base_list)
        powers[i] = 1
        # cant use add_unit because no base units exist yet
        _new_unit(name, 1, powers)
        _UNIT_LIB.base_names.append(name)

    # test for required base types
    missing = [
        utype for utype in required_base_types
        if utype not in _UNIT_LIB.base_types
    ]
    if missing:
        raise ValueError('Not all required base type were present in the'
                         ' config file. missing: %s, at least %s required' %
                         (missing, required_base_types))

    _update_library(_UNIT_LIB)
    return _UNIT_LIB
コード例 #46
0
 def __init__(self, path):
     """Load the config file at *path* into a case-sensitive parser."""
     parser = RawConfigParser()
     # Identity transform keeps option names exactly as written.
     parser.optionxform = lambda name: name
     if path:  # Test support
         parser.read(path)
     self.config = parser
    url = "http://xlr:5516/api/v1/config/byTypeAndTitle?configurationType=%s&title=%s" % (server_type, url_encoded_title)
    request = urllib.request.Request(url) 
    request.add_header("Authorization", "Basic %s" % base64string)   
    result = urllib.request.urlopen(request)
    return json.loads(result.read())[0]

def save_configuration_object(config_object):
    """PUT an updated configuration object back to the XL Release REST API.

    Args:
        config_object (dict): full CI payload; its "id" selects the target URL.
    """
    headers = {'Content-Type': 'application/json'}
    request = urllib.request.Request("http://xlr:5516/api/v1/config/%s" % (config_object["id"]), json.dumps(config_object).encode("utf-8"), headers)
    # NOTE(review): base64string is a module-level credential defined elsewhere.
    request.add_header("Authorization", "Basic %s" % base64string)
    # urllib issues POST for requests with data; force a PUT instead.
    request.get_method = lambda: 'PUT'
    # Close the response promptly instead of leaving the socket dangling
    # (the result body is not needed; the old unused `result2` is gone).
    response = urllib.request.urlopen(request)
    response.close()

def update_ci(server_title, server_type, username, properties):
    """Fetch the CI with the given title/type and update its properties.

    Args:
        server_title (str): title of the configuration object to update.
        server_type (str): XL Release configuration type of the object.
        username (str): credential name (used for logging only).
        properties (iterable): (key, value) pairs to set on the CI.
    """
    print("Processing credential [%s] for server type [%s] with title [%s]" % (username, server_type, server_title))
    # BUG FIX: previously called get_configuration_object(section, server_type),
    # silently depending on the module-level loop variable `section` instead of
    # the server_title parameter.
    config_object = get_configuration_object(server_title, server_type)
    for key, value in properties:
        config_object[key] = value
    save_configuration_object(config_object)

# Parse the credentials file given on the command line; one section per CI.
cp = RawConfigParser()
# Keep option keys case-sensitive instead of the parser's default lowercasing.
cp.optionxform = str
cp.read(sys.argv[1])

for section in cp.sections():
    server_type = cp.get(section, "type")
    username = cp.get(section, "username")
    update_ci(section, server_type, username, cp.items(section))

print("Updated credentials")

コード例 #48
0
ファイル: setup.py プロジェクト: zurgeg/grammar-check
def load_config(file='setup.cfg'):
    """Read *file* and return a parser with normalised option names.

    Option names are lowercased and underscores become hyphens, so
    'Install_Requires' and 'install-requires' refer to the same option.
    """
    parser = RawConfigParser()
    parser.optionxform = lambda name: name.lower().replace('_', '-')
    parser.read(file)
    return parser
コード例 #49
0
ファイル: eds.py プロジェクト: christiansandberg/canopen
def export_eds(od, dest=None, file_info=None, device_commisioning=False):
    """Write the object dictionary *od* out in EDS (INI) format.

    Args:
        od: canopen ObjectDictionary to export.
        dest: writable file object; defaults to sys.stdout.
        file_info (dict): extra entries for the [FileInfo] section.
        device_commisioning (bool): also emit the [DeviceComissioning]
            section and ParameterValue entries.  (The misspelled section
            name matches the EDS specification, CiA 306.)
    """
    # BUG FIX: file_info used to be a mutable default argument ({}) that was
    # mutated via setdefault() on every call; use a None sentinel instead.
    if file_info is None:
        file_info = {}

    def export_object(obj, eds):
        # Dispatch on the concrete object-dictionary node type.
        if type(obj) is objectdictionary.Variable:
            return export_variable(obj, eds)
        if type(obj) is objectdictionary.Record:
            return export_record(obj, eds)
        if type(obj) is objectdictionary.Array:
            return export_array(obj, eds)

    def export_common(var, eds, section):
        # Entries shared by variables, records and arrays.
        eds.add_section(section)
        eds.set(section, "ParameterName", var.name)
        if var.storage_location:
            eds.set(section, "StorageLocation", var.storage_location)

    def export_variable(var, eds):
        if type(var.parent) is objectdictionary.ObjectDictionary:
            # top level variable
            section = "%04X" % var.index
        else:
            # nested variable
            section = "%04Xsub%X" % (var.index, var.subindex)

        export_common(var, eds, section)
        eds.set(section, "ObjectType", "0x%X" % VAR)
        if var.data_type:
            eds.set(section, "DataType", "0x%04X" % var.data_type)
        if var.access_type:
            eds.set(section, "AccessType", var.access_type)

        # Prefer the raw (textual) default when one was parsed from an EDS.
        if getattr(var, 'default_raw', None) is not None:
            eds.set(section, "DefaultValue", var.default_raw)
        elif getattr(var, 'default', None) is not None:
            eds.set(section, "DefaultValue",
                    _revert_variable(var.data_type, var.default))

        if device_commisioning:
            if getattr(var, 'value_raw', None) is not None:
                eds.set(section, "ParameterValue", var.value_raw)
            elif getattr(var, 'value', None) is not None:
                # BUG FIX: this branch used to revert var.default, silently
                # dropping the commissioned value.
                eds.set(section, "ParameterValue",
                        _revert_variable(var.data_type, var.value))

        # BUG FIX: removed a duplicate, unconditional DataType write that
        # raised TypeError ("0x%04X" % None) whenever data_type was unset.
        eds.set(section, "PDOMapping", hex(var.pdo_mappable))

        if getattr(var, 'min', None) is not None:
            eds.set(section, "LowLimit", var.min)
        if getattr(var, 'max', None) is not None:
            eds.set(section, "HighLimit", var.max)

    def export_record(var, eds):
        section = "%04X" % var.index
        export_common(var, eds, section)
        eds.set(section, "SubNumber", "0x%X" % len(var.subindices))
        ot = RECORD if type(var) is objectdictionary.Record else ARR
        eds.set(section, "ObjectType", "0x%X" % ot)
        for i in var:
            export_variable(var[i], eds)

    # Arrays serialise exactly like records (ObjectType differs via ARR).
    export_array = export_record

    eds = RawConfigParser()
    # both disables lowercasing, and allows int keys
    eds.optionxform = str

    from datetime import datetime as dt
    defmtime = dt.utcnow()

    try:
        # only if eds was loaded by us
        origFileInfo = od.__edsFileInfo
    except AttributeError:
        origFileInfo = {
            # just set some defaults
            "CreationDate": defmtime.strftime("%m-%d-%Y"),
            # BUG FIX: time format used %m (month number); %M is minutes.
            "CreationTime": defmtime.strftime("%I:%M%p"),
            "EdsVersion": 4.2,
        }

    file_info.setdefault("ModificationDate", defmtime.strftime("%m-%d-%Y"))
    file_info.setdefault("ModificationTime", defmtime.strftime("%I:%M%p"))
    for k, v in origFileInfo.items():
        file_info.setdefault(k, v)

    eds.add_section("FileInfo")
    for k, v in file_info.items():
        eds.set("FileInfo", k, v)

    eds.add_section("DeviceInfo")
    for eprop, odprop in [
        ("VendorName", "vendor_name"),
        ("VendorNumber", "vendor_number"),
        ("ProductName", "product_name"),
        ("ProductNumber", "product_number"),
        ("RevisionNumber", "revision_number"),
        ("OrderCode", "order_code"),
        ("SimpleBootUpMaster", "simple_boot_up_master"),
        ("SimpleBootUpSlave", "simple_boot_up_slave"),
        ("Granularity", "granularity"),
        ("DynamicChannelsSupported", "dynamic_channels_supported"),
        ("GroupMessaging", "group_messaging"),
        ("NrOfRXPDO", "nr_of_RXPDO"),
        ("NrOfTXPDO", "nr_of_TXPDO"),
        ("LSS_Supported", "LSS_supported"),
    ]:
        val = getattr(od.device_information, odprop, None)
        # BUG FIX: was "if type(val) is None", which is never true.
        if val is None:
            continue
        elif type(val) is str:
            eds.set("DeviceInfo", eprop, val)
        elif type(val) in (int, bool):
            eds.set("DeviceInfo", eprop, int(val))

    # we are also adding out of spec baudrates here.
    for rate in od.device_information.allowed_baudrates.union(
        {10e3, 20e3, 50e3, 125e3, 250e3, 500e3, 800e3, 1000e3}):
        eds.set("DeviceInfo", "BaudRate_%i" % (rate / 1000),
                int(rate in od.device_information.allowed_baudrates))

    if device_commisioning and (od.bitrate or od.node_id):
        eds.add_section("DeviceComissioning")
        if od.bitrate:
            eds.set("DeviceComissioning", "BaudRate", int(od.bitrate / 1000))
        if od.node_id:
            eds.set("DeviceComissioning", "NodeID", int(od.node_id))

    eds.add_section("Comments")
    i = 0
    for line in od.comments.splitlines():
        i += 1
        eds.set("Comments", "Line%i" % i, line)
    eds.set("Comments", "Lines", i)

    eds.add_section("DummyUsage")
    for i in range(1, 8):
        key = "Dummy%04d" % i
        eds.set("DummyUsage", key, 1 if (key in od) else 0)

    def mandatory_indices(x):
        return x in {0x1000, 0x1001, 0x1018}

    def manufacturer_indices(x):
        return x in range(0x2000, 0x6000)

    def optional_indices(x):
        # Everything above 0x1001 that is neither mandatory nor in the
        # manufacturer-specific range.
        return all((
            x > 0x1001,
            not mandatory_indices(x),
            not manufacturer_indices(x),
        ))

    supported_mandatory_indices = list(filter(mandatory_indices, od))
    supported_optional_indices = list(filter(optional_indices, od))
    supported_manufacturer_indices = list(filter(manufacturer_indices, od))

    def add_list(section, indices):
        # Emit one "<n> = 0x<index>" entry per object, then the objects.
        eds.add_section(section)
        eds.set(section, "SupportedObjects", len(indices))
        for i, index in enumerate(indices, start=1):
            eds.set(section, i, "0x%04X" % index)
        for index in indices:
            export_object(od[index], eds)

    add_list("MandatoryObjects", supported_mandatory_indices)
    add_list("OptionalObjects", supported_optional_indices)
    add_list("ManufacturerObjects", supported_manufacturer_indices)

    if not dest:
        import sys
        dest = sys.stdout

    eds.write(dest, False)
コード例 #50
0
            "FuncSanity": "Runs gene callers and annotation programs on MAGs",
        }, {
            "PhyloSanity": ("directory", "config_file", "cancel_autocommit",
                            "output_directory", "biometadb_project"),
            "FuncSanity": ("directory", "config_file", "cancel_autocommit",
                           "output_directory", "biometadb_project",
                           "type_file", "prokka", "reevaluate_quality"),
        }))

# Print the version and stop before doing any real work.
if ap.args.version:
    print("MetaSanity version %s" % VERSION)
    exit(0)

# Config file read in
cfg = RawConfigParser()
# Keep option keys case-sensitive (the parser lowercases them by default).
cfg.optionxform = str
cfg.read(ap.args.config_file)

# Pipeline name -> name of the genome-list file that pipeline uses.
met_list = {"PhyloSanity": "evaluation.list", "FuncSanity": "annotation.list"}

# Extra CLI flag forwarded to the pipeline when --prokka was requested.
prokka_add = []
if ap.args.prokka:
    prokka_add = ["--prokka"]

# File that will hold the id of the launched docker container.
cid_file_name = 'docker.pid'

# Run docker version
if not os.path.exists(PIPEDM_PATH):
    try:
        subprocess.run(
            [
コード例 #51
0
ファイル: setup.py プロジェクト: Genki-S/language-check
def load_config(file='setup.cfg'):
    """Parse *file* and return the config with normalised option names.

    Each option key is folded to lower case and underscores are replaced
    by hyphens, matching setuptools' setup.cfg conventions.
    """
    def normalise(option):
        return option.lower().replace('_', '-')

    config = RawConfigParser()
    config.optionxform = normalise
    config.read(file)
    return config
コード例 #52
0
ファイル: library.py プロジェクト: Jerrythafast/FDSTools
def parse_library(handle):
    """Parse an FDSTools library file as made with the library tool.

    Validates every setting of every marker and returns a
    ReportedRangeStore describing the reported range of each marker.
    Raises ValueError for any malformed or ambiguous configuration.

    NOTE: all bare `except:` clauses were narrowed to `except ValueError:`;
    every guarded operation is an int() conversion or an explicit
    `raise ValueError`, and a bare except would also swallow
    KeyboardInterrupt/SystemExit.
    """
    markers = {}
    ini = RawConfigParser()
    # Keep marker names case-sensitive.
    ini.optionxform = str
    ini.readfp(handle)
    for section in ini.sections():
        for marker in ini.options(section):
            # Everything after the first semicolon is an inline comment.
            value = ini.get(section, marker).split(";", 1)[0].strip()
            section_low = section.lower()
            if section_low == "flanks":
                value = PAT_SPLIT.split(value)
                if len(value) != 2:
                    raise ValueError(
                        "For marker %s, %i flanking sequences were given, "
                        "need exactly 2" % (marker, len(value)))
                for i, val in enumerate(value):
                    if PAT_SEQ_IUPAC.match(val) is None:
                        # Not a sequence, so it must be a positive length.
                        try:
                            value[i] = int(val)
                            if value[i] < 1:
                                raise ValueError
                        except ValueError:
                            raise ValueError(
                                "Flanking sequence '%s' of marker %s is invalid"
                                % (val, marker))
            elif section_low in ("prefix", "suffix"):
                if not PAT_SEQ_RAW.match(value):
                    raise ValueError(
                        "The %s sequence '%s' of marker %s is invalid (note: only one sequence "
                        "needs to be specified for reference)" %
                        (section_low, value, marker))
            elif section_low == "genome_position":
                # Format: chromosome, start, end[, start2, end2, ...].
                value = PAT_SPLIT.split(value)
                chromosome = PAT_CHROMOSOME.match(value[0])
                if chromosome is None:
                    raise ValueError("Invalid chromosome '%s' for marker %s" %
                                     (value[0], marker))
                pos = [chromosome.group(1)]
                for i in range(1, len(value)):
                    try:
                        pos.append(int(value[i]))
                    except ValueError:
                        raise ValueError(
                            "Position '%s' of marker %s is not a valid integer"
                            % (value[i], marker))
                    # Even i is an end position; it must exceed its start.
                    if not i % 2 and pos[-2] >= pos[-1]:
                        raise ValueError(
                            "End position %i of marker %s must be higher than "
                            "corresponding start position %i" %
                            (pos[-1], marker, pos[-2]))
                if len(value) == 1:
                    pos.append(1)
                value = tuple(pos)
            elif section_low == "length_adjust":
                try:
                    value = int(value)
                except ValueError:
                    raise ValueError(
                        "Length adjustment '%s' of marker %s is not a valid "
                        "integer" % (value, marker))
            elif section_low == "block_length":
                try:
                    value = int(value)
                except ValueError:
                    raise ValueError(
                        "Block length '%s' of marker %s is not a valid integer"
                        % (value, marker))
            elif section_low == "max_expected_copies":
                try:
                    value = int(value)
                except ValueError:
                    raise ValueError(
                        "Maximum number of expected copies '%s' of marker %s "
                        "is not a valid integer" % (value, marker))
            elif section_low == "repeat":
                if PAT_STR_DEF.match(value) is None:
                    raise ValueError(
                        "STR definition '%s' of marker %s is invalid" %
                        (value, marker))
            elif section_low == "no_repeat":
                if PAT_SEQ_RAW.match(value) is None:
                    raise ValueError(
                        "Reference sequence '%s' of marker %s is invalid" %
                        (value, marker))
            elif section_low == "microhaplotype_positions":
                value = PAT_SPLIT.split(value)
                for i in range(len(value)):
                    try:
                        value[i] = int(value[i])
                    except ValueError:
                        raise ValueError(
                            "Invalid position number '%s' for microhaplotype marker %s"
                            % (value[i], marker))
            elif section_low == "expected_allele_length":
                # One value: minimum only.  Two values: minimum and maximum.
                value = PAT_SPLIT.split(value)
                try:
                    min_length = int(value[0])
                except ValueError:
                    raise ValueError(
                        "Minimum expected allele length '%s' of marker %s "
                        "is not a valid integer" % (value[0], marker))
                if len(value) == 2:
                    try:
                        max_length = int(value[1])
                    except ValueError:
                        raise ValueError(
                            "Maximum expected allele length '%s' of marker %s "
                            "is not a valid integer" % (value[1], marker))
                elif len(value) > 2:
                    raise ValueError(
                        "%i values specified for expected_allele_length of marker %s; specify "
                        "only a minimum and optionally a maximum length" %
                        (len(value), marker))
                else:
                    max_length = sys.maxsize
                value = (min_length, max_length)

            # Store the validated value.
            if marker not in markers:
                markers[marker] = {}
            markers[marker][section_low] = value

    # Create a ReportedRangeStore to store data about each marker.
    # A marker may be configured by at most one of these mutually-exclusive
    # groups of sections.
    reported_range_store = classes.ReportedRangeStore()
    MUTEX_GROUPS = {
        "explicit STR":
        ("prefix", "suffix", "repeat", "length_adjust", "block_length"),
        "explicit non-STR": ("no_repeat", ),
        "STRNaming-specific":
        tuple()
    }  # NOTE: No STRNaming-specific settings now...
    for marker, settings in markers.items():
        groups = [
            mutex_group for mutex_group, sections in MUTEX_GROUPS.items()
            if any(section in settings for section in sections)
        ]
        if len(groups) > 1:
            raise ValueError(
                "The definition of marker %s is ambiguous, because it appears in %s"
                % (marker, " and ".join("%s sections (%s)" % (group, ", ".join(
                    section
                    for section in MUTEX_GROUPS[group] if section in settings))
                                        for group in groups)))
        # Options common to all configuration styles.
        options = {
            option: settings[option]
            for option in {
                "flanks", "max_expected_copies", "expected_allele_length",
                "microhaplotype_positions"
            } & settings.keys()
        }
        if "explicit STR" in groups:
            # Legacy FDSTools-style definition of an STR marker.
            # TODO: Alias of STR markers was defined as excluding the prefix/suffix!
            if "flanks" not in options:
                options["flanks"] = ("", "")
            elif any(isinstance(flank, int) for flank in options["flanks"]):
                raise ValueError(
                    "Please specify an explit flanking sequence, not just a length, for marker %s"
                    % marker)
            reported_range = add_legacy_range(
                reported_range_store, marker, settings.get("prefix", ""),
                settings.get("suffix", ""),
                [(unit, int(min_repeats), int(max_repeats))
                 for unit, min_repeats, max_repeats in
                 PAT_STR_DEF_BLOCK.findall(settings.get("repeat", ""))],
                options, settings.get("genome_position", None))
            if "length_adjust" in settings:
                reported_range.length_adjust -= settings["length_adjust"]
            if "block_length" in settings:
                reported_range.block_length = settings["block_length"]
        elif "explicit non-STR" in groups:
            # Legacy FDSTools-style definition of a non-STR marker.
            if "flanks" not in options:
                options["flanks"] = ("", "")
            elif any(isinstance(flank, int) for flank in options["flanks"]):
                raise ValueError(
                    "Please specify an explit flanking sequence, not just a length, for marker %s"
                    % marker)
            refseq = settings["no_repeat"]
            pos = None
            if "genome_position" in settings:
                pos = settings["genome_position"]

                # Sanity check: end position should reflect ref length.
                length = sum(pos[i] - pos[i - 1] + 1
                             for i in range(2, len(pos), 2))
                if len(refseq) < length or (len(pos) % 2
                                            and len(refseq) != length):
                    raise ValueError(
                        "Length of reference sequence of marker %s is %i bases, but "
                        "genome positions add up to %i bases" %
                        (marker, len(refseq), length))
            add_legacy_range(reported_range_store, marker, refseq, "", [],
                             options, pos)
        else:
            # Use STRNaming for this marker.
            try:
                genome_position = settings["genome_position"]
            except KeyError:
                raise ValueError(
                    "No genome_position or explicit repeat or no_repeat "
                    "configuration provided for marker %s" % marker)
            if not len(genome_position) % 2:
                raise ValueError(
                    "Invalid genomic position given for marker %s: need an odd number of values "
                    "(chromosome, start position, end position[, start2, end2, ...])"
                    % marker)
            # TODO: Alias of STR markers was defined as excluding the prefix/suffix!
            if len(genome_position) == 3:
                chromosome, start, end = genome_position
                reported_range_store.add_range(marker,
                                               chromosome,
                                               start,
                                               end + 1,
                                               load_structures=True,
                                               options=options)
            else:
                reported_range_store.add_complex_range(marker,
                                                       genome_position,
                                                       options=options)

        if "microhaplotype_positions" in settings:
            # Put Ns in reporting range refseq for microhaplotype markers.
            reported_range = reported_range_store.get_range(marker)
            if reported_range.library:
                raise ValueError(
                    "Cannot define microhaplotype positions for STR marker %s"
                    % marker)
            refseq = list(reported_range.refseq)
            location = reported_range.location
            for position in settings["microhaplotype_positions"]:
                refseq[libsequence.get_genome_pos(location,
                                                  position,
                                                  invert=True)] = "N"
            reported_range.refseq = "".join(refseq)
    return reported_range_store
コード例 #53
0
ファイル: corenlp.py プロジェクト: HMRLKE/emLam
 def __load_config(self, props_file):
     """Read the CoreNLP properties file, defaulting memory to 4g."""
     parser = RawConfigParser({'memory': '4g'})
     parser.optionxform = str  # So no lowercasing occurs
     parser.read(props_file)
     return parser
コード例 #54
0
 def read_config(self, path):
     """Return a case-sensitive parser loaded from *path*."""
     parser = RawConfigParser()
     # Identity transform: option names are kept exactly as written.
     parser.optionxform = lambda name: name
     parser.read(path)
     return parser
コード例 #55
0
def make_empty_library_ini(type, microhaplotypes=False):
    """Build a skeleton FDSTools library file with commented, empty sections.

    :param type: library flavour; the branches below distinguish "smart",
        "str", "non-str" and "full".  (The name shadows the builtin but is
        kept unchanged for backward compatibility with keyword callers.)
    :param microhaplotypes: if True, include the [microhaplotype_positions]
        section even when *type* is not "full".
    :return: a RawConfigParser populated with empty sections whose comments
        explain how each section is to be filled in.
    """
    ini = RawConfigParser(allow_no_value=True)
    ini.optionxform = str  # marker names are case sensitive
    # Bind the module-level ini_add_comment helper as a method of this parser.
    ini.add_comment = MethodType(ini_add_comment, ini)

    # Create sections and add comments to explain how to use them.
    ini.add_section("genome_position")
    ini.add_comment(
        "genome_position",  #smart, str, non-str, full
        "Specify the chromosome number and positions of the first and last reported nucleotide of "
        "each marker (both inclusive, using human genome build GRCh38%s). This range should not "
        "include the primer binding sites.%s" %
        (" and rCRS for human mtDNA" if type != "str" else "",
         " This section is required for automatic configuration of markers; it is optional "
         "for markers that are explicitly configured in this library file."
         if type != "smart" else ""))
    if type != "str":
        ini.add_comment(
            "genome_position",
            "Specify 'M' as the chromosome name for markers on mitochondrial "
            "DNA. Allele names generated for these markers will follow mtDNA "
            "nomenclature guidelines (Parson et al., 2014). If one of your "
            "mtDNA fragments starts near the end of the reference sequence "
            "and continues at the beginning, you can obtain correct base "
            "numbering by specifying the fragment's genome position as \"M, "
            "(starting position), 16569, 1, (ending position)\". This tells "
            "FDSTools that the marker is a concatenation of two fragments, "
            "where the first fragment ends at position 16569 and the second "
            "fragment starts at position 1. Similarly, for a fragment that "
            "spans position 3107 in the rCRS (which is nonexistent), you may "
            "specify \"M, (starting position), 3106, 3108, (ending "
            "position)\".")
    if microhaplotypes or type == "full":
        ini.add_section("microhaplotype_positions")
        ini.add_comment(
            "microhaplotype_positions",
            "For each microhaplotype marker, specify one or more positions of SNPs that should "
            "be reported as part of the microhaplotype.%s" %
            (" If the [genome_position] of the marker is given, positions must be within the "
             "given range. Otherwise, the reference sequence must be explicitly provided in "
             "the [no_repeat] section; position 1 refers to the first base in the reference "
             "sequence." if type in ("non-str", "full") else ""))
    ini.add_section("flanks")
    ini.add_comment(
        "flanks",
        "The TSSV tool will use a pair of short anchor sequences just outside the reported range "
        "of each marker (e.g., primer sequences) to identify which input reads correspond to "
        "which marker. %s The sequence may contain IUPAC codes for ambiguous positions to account "
        "for degenerate bases in the primers or for bisulfite-converted targets in methylation-"
        "based studies (e.g., Y matches either C or T)." %
        ("Specify two comma-separated values: left flank and right flank sequence, in the same "
         "sequence orientation (strand)." if type in ("str", "non-str") else
         ("The default length of the anchor sequences used can be specified as an argument to "
          "the TSSV tool. Individual alternative lengths can be specified here for each marker. "
          "Specify two comma-separated values: one for the left and one for the right flank. "
          "The value can be a number (the length of sequence to use) or an explicit anchor "
          "sequence to use.%s" %
          (" For markers configured explicitly in this library file, the anchor sequences "
           "must be specified explicitly as well." if type == "full" else ""))))
    ini.add_section("max_expected_copies")
    ini.add_comment(
        "max_expected_copies",
        "By default, the Allelefinder tool will report up to 2 alleles per marker, but only a "
        "single allele for markers %son the Y chromosome. If this is incorrect, specify the "
        "maximum expected number of copies (i.e., alleles) for each marker in a "
        "single-contributor reference sample here." %
        ("on mitochondrial DNA or " if type != "str" else ""))
    ini.add_section("expected_allele_length")
    ini.add_comment(
        "expected_allele_length",
        "Specify one or two values for each marker. The first value gives the "
        "expected minimum length (in nucleotides, %sexcluding flanks) of the "
        "alleles and the second value (if given) specifies the maximum allele "
        "length expected for that marker (both inclusive). The TSSV tool will filter "
        "sequences that have a length outside this range." %
        ("including prefix and suffix, " if type in ("str", "full") else ""))

    # Sections that only apply to explicitly-configured STR markers.
    if type in ("str", "full"):
        ini.add_section("prefix")
        ini.add_comment(
            "prefix",
            "For explicitly-configured STR markers: Specify the prefix sequence of each STR "
            "marker. The prefix is the sequence between the left flank and the repeat and is "
            "omitted from allele names. The sequence is used as the reference sequence for that "
            "marker when generating allele names. Deviations from the reference are expressed as "
            "variants.")
        ini.add_section("suffix")
        ini.add_comment(
            "suffix",
            "For explicitly-configured STR markers: Specify the suffix sequence of each STR "
            "marker. The suffix is the sequence between the repeat and the right flank. The "
            "sequence is used as the reference sequence for that marker when generating allele "
            "names.")
        ini.add_section("repeat")
        ini.add_comment(
            "repeat",
            "For explicitly-configured STR markers: Specify the repeat structure of each STR "
            "marker in space-separated triples of sequence, minimum number of repeats, and "
            "maximum number of repeats.")
        ini.add_section("length_adjust")
        ini.add_comment(
            "length_adjust",
            "For explicitly-configured STR markers: To prevent discrepancies between traditional "
            "CE allele numbers and the CE number in FDSTools allele names, the CE allele number "
            "as calculated by FDSTools is based on the length of the repeat sequence minus the "
            "adjustment specified here.")
        ini.add_section("block_length")
        ini.add_comment(
            "block_length",
            "For explicitly-configured STR markers: Specify the repeat unit length of each STR "
            "marker. By default, the length of the repeat unit of the longest repeat is used."
        )
    # Section that only applies to explicitly-configured non-STR markers.
    if type in ("non-str", "full"):
        ini.add_section("no_repeat")
        ini.add_comment(
            "no_repeat",
            "For explicitly-configured non-STR markers: Specify the reference sequence for each "
            "non-STR marker.")
    return ini
コード例 #56
0
ファイル: krainstance.py プロジェクト: taus11/freeipa
    def __spawn_instance(self):
        """
        Create and configure a new KRA instance using pkispawn.
        Creates a configuration file with IPA-specific
        parameters and passes it to the base class to call pkispawn
        """

        # Create an empty and secured file
        # (mkstemp creates it 0600; reassign ownership to the PKI service user)
        (cfg_fd, cfg_file) = tempfile.mkstemp()
        os.close(cfg_fd)
        pent = pwd.getpwnam(self.service_user)
        os.chown(cfg_file, pent.pw_uid, pent.pw_gid)
        # Temporary NSS client database and one-off password for pkispawn.
        self.tmp_agent_db = tempfile.mkdtemp(prefix="tmp-",
                                             dir=paths.VAR_LIB_IPA)
        tmp_agent_pwd = ipautil.ipa_generate_password()

        # Create a temporary file for the admin PKCS #12 file
        (admin_p12_fd, admin_p12_file) = tempfile.mkstemp()
        os.close(admin_p12_fd)

        # Create KRA configuration
        config = RawConfigParser()
        config.optionxform = str  # pkispawn option names are case sensitive
        config.add_section("KRA")

        # Security Domain Authentication
        config.set("KRA", "pki_security_domain_https_port", "443")
        config.set("KRA", "pki_security_domain_password", self.admin_password)
        config.set("KRA", "pki_security_domain_user", self.admin_user)

        # issuing ca
        config.set("KRA", "pki_issuing_ca_uri",
                   "https://%s" % ipautil.format_netloc(self.fqdn, 443))

        # Server
        config.set("KRA", "pki_enable_proxy", "True")
        config.set("KRA", "pki_restart_configured_instance", "False")
        config.set("KRA", "pki_backup_keys", "True")
        config.set("KRA", "pki_backup_password", self.admin_password)

        # Client security database
        config.set("KRA", "pki_client_database_dir", self.tmp_agent_db)
        config.set("KRA", "pki_client_database_password", tmp_agent_pwd)
        config.set("KRA", "pki_client_database_purge", "True")
        config.set("KRA", "pki_client_pkcs12_password", self.admin_password)

        # Administrator
        config.set("KRA", "pki_admin_name", self.admin_user)
        config.set("KRA", "pki_admin_uid", self.admin_user)
        config.set("KRA", "pki_admin_email", "root@localhost")
        config.set("KRA", "pki_admin_password", self.admin_password)
        config.set("KRA", "pki_admin_nickname", "ipa-ca-agent")
        config.set("KRA", "pki_admin_subject_dn",
                   str(DN(('cn', 'ipa-ca-agent'), self.subject_base)))
        config.set("KRA", "pki_import_admin_cert", "False")
        config.set("KRA", "pki_client_admin_cert_p12", admin_p12_file)

        # Directory server
        config.set("KRA", "pki_ds_ldap_port", "389")
        config.set("KRA", "pki_ds_password", self.dm_password)
        config.set("KRA", "pki_ds_base_dn", str(self.basedn))
        config.set("KRA", "pki_ds_database", "ipaca")
        config.set("KRA", "pki_ds_create_new_db", "False")

        self._use_ldaps_during_spawn(config)

        # Certificate subject DNs
        config.set("KRA", "pki_subsystem_subject_dn",
                   str(DN(('cn', 'CA Subsystem'), self.subject_base)))
        config.set("KRA", "pki_sslserver_subject_dn",
                   str(DN(('cn', self.fqdn), self.subject_base)))
        config.set("KRA", "pki_audit_signing_subject_dn",
                   str(DN(('cn', 'KRA Audit'), self.subject_base)))
        config.set(
            "KRA", "pki_transport_subject_dn",
            str(DN(('cn', 'KRA Transport Certificate'), self.subject_base)))
        config.set(
            "KRA", "pki_storage_subject_dn",
            str(DN(('cn', 'KRA Storage Certificate'), self.subject_base)))

        # Certificate nicknames
        # Note that both the server certs and subsystem certs reuse
        # the ca certs.
        config.set("KRA", "pki_subsystem_nickname",
                   "subsystemCert cert-pki-ca")
        config.set("KRA", "pki_sslserver_nickname", "Server-Cert cert-pki-ca")
        config.set("KRA", "pki_audit_signing_nickname",
                   "auditSigningCert cert-pki-kra")
        config.set("KRA", "pki_transport_nickname",
                   "transportCert cert-pki-kra")
        config.set("KRA", "pki_storage_nickname", "storageCert cert-pki-kra")

        # Shared db settings
        # Needed because CA and KRA share the same database
        # We will use the dbuser created for the CA
        config.set("KRA", "pki_share_db", "True")
        config.set(
            "KRA", "pki_share_dbuser_dn",
            str(DN(('uid', 'pkidbuser'), ('ou', 'people'), ('o', 'ipaca'))))

        if not (os.path.isdir(paths.PKI_TOMCAT_ALIAS_DIR)
                and os.path.isfile(paths.PKI_TOMCAT_PASSWORD_CONF)):
            # generate pin which we know can be used for FIPS NSS database
            pki_pin = ipautil.ipa_generate_password()
            config.set("KRA", "pki_pin", pki_pin)
        else:
            # Existing Tomcat NSS database: keep its pin; None also tells the
            # nolog list below there is no pin to scrub from logs.
            pki_pin = None

        _p12_tmpfile_handle, p12_tmpfile_name = tempfile.mkstemp(dir=paths.TMP)

        if self.clone:
            # Replica install: reuse the PKCS#12 material from the master.
            krafile = self.pkcs12_info[0]
            shutil.copy(krafile, p12_tmpfile_name)
            pent = pwd.getpwnam(self.service_user)
            os.chown(p12_tmpfile_name, pent.pw_uid, pent.pw_gid)

            # Security domain registration
            config.set("KRA", "pki_security_domain_hostname", self.fqdn)
            config.set("KRA", "pki_security_domain_https_port", "443")
            config.set("KRA", "pki_security_domain_user", self.admin_user)
            config.set("KRA", "pki_security_domain_password",
                       self.admin_password)

            # Clone
            config.set("KRA", "pki_clone", "True")
            config.set("KRA", "pki_clone_pkcs12_path", p12_tmpfile_name)
            config.set("KRA", "pki_clone_pkcs12_password", self.dm_password)
            config.set("KRA", "pki_clone_setup_replication", "False")
            config.set(
                "KRA", "pki_clone_uri",
                "https://%s" % ipautil.format_netloc(self.master_host, 443))
        else:
            # the admin cert file is needed for the first instance of KRA
            cert = self.get_admin_cert()
            # First make sure that the directory exists
            parentdir = os.path.dirname(paths.ADMIN_CERT_PATH)
            if not os.path.exists(parentdir):
                os.makedirs(parentdir)
            with open(paths.ADMIN_CERT_PATH, "wb") as admin_path:
                admin_path.write(
                    base64.b64encode(cert.public_bytes(x509.Encoding.DER)))

        # Generate configuration file
        with open(cfg_file, "w") as f:
            config.write(f)

        try:
            # nolog_list keeps the secrets out of pkispawn's logged output.
            DogtagInstance.spawn_instance(self,
                                          cfg_file,
                                          nolog_list=(self.dm_password,
                                                      self.admin_password,
                                                      pki_pin, tmp_agent_pwd))
        finally:
            # Always remove the secret-bearing temporary files, even on failure.
            os.remove(p12_tmpfile_name)
            os.remove(cfg_file)
            os.remove(admin_p12_file)

        shutil.move(paths.KRA_BACKUP_KEYS_P12, paths.KRACERT_P12)
        logger.debug("completed creating KRA instance")
コード例 #57
0
    def write_acserver_config(self, preset):
        """Render *preset* into the Assetto Corsa server_cfg.ini.

        Builds the full configuration in memory first and only then writes
        it to ``<acserver_config_dir>/server_cfg.ini``.  The original code
        opened the output file before building the config, which leaked the
        file handle (and truncated the existing file) if any of the preset
        lookups below raised; the ``with`` block at the end fixes both.
        """
        config = RawConfigParser()
        config.optionxform = str  # AC expects upper-case keys verbatim
        config.add_section('SERVER')

        # build a distinct list of car names
        car_list = []
        for driver in preset.entry_set.all():
            if driver.car.dirname not in car_list:
                car_list.append(driver.car.dirname)

        # set max_clients value to the track's pitbox value if null
        if not preset.max_clients:
            preset.max_clients = preset.track.pitboxes

        config.set('SERVER', 'NAME', preset.server_setting.name)
        config.set('SERVER', 'CARS', ';'.join(car_list))
        config.set('SERVER', 'CONFIG_TRACK', xstr(preset.track.subversion))
        config.set('SERVER', 'TRACK', preset.track.dirname)
        config.set('SERVER', 'SUN_ANGLE', time_to_sun_angle(preset.time_of_day))
        config.set('SERVER', 'PASSWORD', str(preset.session_password))
        config.set('SERVER', 'ADMIN_PASSWORD', str(preset.server_setting.admin_password))
        config.set('SERVER', 'UDP_PORT', str(preset.server_setting.udp_port))
        config.set('SERVER', 'TCP_PORT', str(preset.server_setting.tcp_port))
        config.set('SERVER', 'HTTP_PORT', str(preset.server_setting.http_port))
        config.set('SERVER', 'PICKUP_MODE_ENABLED', str(int(preset.pickup_mode_enabled)))
        config.set('SERVER', 'LOOP_MODE', str(int(preset.loop_mode)))
        config.set('SERVER', 'SLEEP_TIME', '1')
        config.set('SERVER', 'CLIENT_SEND_INTERVAL', str(preset.server_setting.client_send_interval))
        config.set('SERVER', 'SEND_BUFFER_SIZE', str(preset.server_setting.send_buffer_size))
        config.set('SERVER', 'RECV_BUFFER_SIZE', str(preset.server_setting.recv_buffer_size))
        config.set('SERVER', 'RACE_OVER_TIME', str(preset.race_over_time))
        config.set('SERVER', 'KICK_QUORUM', str(preset.kick_quorum))
        config.set('SERVER', 'VOTING_QUORUM', str(preset.voting_quorum))
        config.set('SERVER', 'VOTE_DURATION', str(preset.vote_duration))
        config.set('SERVER', 'BLACKLIST_MODE', str(preset.blacklist_mode))
        config.set('SERVER', 'FUEL_RATE', str(preset.fuel_rate))
        config.set('SERVER', 'DAMAGE_MULTIPLIER', str(preset.damage_multiplier))
        config.set('SERVER', 'TYRE_WEAR_RATE', str(preset.tyre_wear_rate))
        config.set('SERVER', 'ALLOWED_TYRES_OUT', str(preset.allowed_tyres_out))
        config.set('SERVER', 'ABS_ALLOWED', str(preset.abs_allowed))
        config.set('SERVER', 'TC_ALLOWED', str(preset.tc_allowed))
        config.set('SERVER', 'STABILITY_ALLOWED', str(int(preset.stability_allowed)))
        config.set('SERVER', 'AUTOCLUTCH_ALLOWED', str(int(preset.autoclutch_allowed)))
        config.set('SERVER', 'TYRE_BLANKETS_ALLOWED', str(int(preset.tyre_blankets_allowed)))
        config.set('SERVER', 'FORCE_VIRTUAL_MIRROR', str(int(preset.force_virtual_mirror)))
        config.set('SERVER', 'REGISTER_TO_LOBBY', '1')
        config.set('SERVER', 'MAX_CLIENTS', str(preset.max_clients))
        config.set('SERVER', 'UDP_PLUGIN_LOCAL_PORT', '11000')
        config.set('SERVER', 'UDP_PLUGIN_ADDRESS', '127.0.0.1:12000')
        config.set('SERVER', 'AUTH_PLUGIN_ADDRESS', '127.0.0.1:50041/acauth?timeout=300&andurl1=www.minorating.com%3A805/minodata/auth/' + preset.server_setting.minorating_grade + '/')
        config.set('SERVER', 'LEGAL_TYRES', 'V;E;HR;ST')
        config.set('SERVER', 'START_RULE', str(preset.start_rule))
        config.set('SERVER', 'QUALIFY_MAX_WAIT_PERC', str(preset.qualify_max_wait_perc))

        if preset.server_setting.welcome_message:
            config.set('SERVER', 'WELCOME_MESSAGE', str(os.path.join(self.acserver_config_dir, 'welcome_message.txt')))

        # Optional session sections: only emitted when the preset enables them.
        if preset.practice_time != 0:
            config.add_section('PRACTICE')
            config.set('PRACTICE', 'NAME', 'Free Practice')
            config.set('PRACTICE', 'TIME', str(preset.practice_time))
            config.set('PRACTICE', 'IS_OPEN', str(int(preset.practice_is_open)))

        if preset.qualify_time != 0:
            config.add_section('QUALIFY')
            config.set('QUALIFY', 'NAME', 'Qualify')
            config.set('QUALIFY', 'TIME', str(preset.qualify_time))
            config.set('QUALIFY', 'IS_OPEN', str(int(preset.qualify_is_open)))

        if preset.race_laps != 0:
            config.add_section('RACE')
            config.set('RACE', 'NAME', 'Race')
            config.set('RACE', 'LAPS', str(preset.race_laps))
            config.set('RACE', 'WAIT_TIME', str(preset.race_wait_time))
            # NOTE(review): unlike the other IS_OPEN values this one is not
            # cast through int() — confirm whether that is intentional.
            config.set('RACE', 'IS_OPEN', str(preset.race_is_open))

        if preset.track_dynamism:
            config.add_section('DYNAMIC_TRACK')
            config.set('DYNAMIC_TRACK', 'SESSION_START', str(preset.track_dynamism.session_start))
            config.set('DYNAMIC_TRACK', 'RANDOMNESS', str(preset.track_dynamism.randomness))
            config.set('DYNAMIC_TRACK', 'SESSION_TRANSFER', str(preset.track_dynamism.session_transfer))
            config.set('DYNAMIC_TRACK', 'LAP_GAIN', str(preset.track_dynamism.lap_gain))

        weather_count = 0
        for weather in preset.weathers.all():
            weather_section = 'WEATHER_' + str(weather_count)
            config.add_section(weather_section)
            config.set(weather_section, 'GRAPHICS', weather.graphics)
            config.set(weather_section, 'BASE_TEMPERATURE_AMBIENT', str(weather.base_temperature_ambient))
            config.set(weather_section, 'VARIATION_AMBIENT', str(weather.variation_ambient))
            config.set(weather_section, 'BASE_TEMPERATURE_ROAD', str(weather.base_temperature_road))
            config.set(weather_section, 'VARIATION_ROAD', str(weather.variation_road))
            weather_count += 1

        # Write the finished config; the context manager guarantees the file
        # handle is closed even if writing fails.
        with open(os.path.join(self.acserver_config_dir, 'server_cfg.ini'), 'w') as cfg_file:
            config.write(cfg_file, space_around_delimiters=False)
コード例 #58
0
    @staticmethod
    def _read_ini(path):
        """Load an INI file with case-sensitive option names (DRY helper)."""
        parser = RawConfigParser()
        parser.optionxform = str
        parser.read(path)
        return parser

    def __init__(self, monolithe_config, api_info):
        """ Initializes a _JavaSDKAPIVersionFileWriter

        Collects version/naming options from *monolithe_config* and *api_info*
        and preloads the vRO attribute/workflow INI defaults from the output
        directory.
        """
        super(APIVersionWriter,
              self).__init__(package="monolithe.generators.lang.vro")

        self.api_version = api_info["version"]
        self._api_version_string = SDKUtils.get_string_version(
            self.api_version)
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        self.monolithe_config = monolithe_config
        self._output = self.monolithe_config.get_option(
            "output", "transformer")
        self._name = self.monolithe_config.get_option("name", "transformer")
        self._class_prefix = ""
        self._product_accronym = self.monolithe_config.get_option(
            "product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")
        self._url = self.monolithe_config.get_option("url", "transformer")

        self._package_prefix = self._get_package_prefix(self._url)
        self._package_name = self._package_prefix + ".vro." + self._name
        self._package_subdir = self._package_name.replace('.', '/')

        self.output_directory = "%s/vro" % (self._output)
        self.override_folder = os.path.normpath("%s/__overrides" %
                                                self.output_directory)
        self.fetchers_path = "/fetchers/"
        self.enums_path = "/enums/"

        # All defaults files share one format; load them through _read_ini
        # instead of repeating the parser-construction boilerplate six times.
        defaults_dir = "%s/vro/__attributes_defaults" % self._output
        self.attrs_defaults = self._read_ini(
            "%s/attrs_defaults.ini" % defaults_dir)
        self.inventory_entities = self._read_ini(
            "%s/inventory_entities.ini" % defaults_dir)
        self.workflow_attrs = self._read_ini(
            "%s/workflow_attrs.ini" % defaults_dir)
        self.attrs_types = self._read_ini(
            "%s/attrs_types.ini" % defaults_dir)

        # Per-API-version plugin and workflow version numbers.
        plugin_info = self._read_ini("%s/plugin.ini" % defaults_dir)
        self.plugin_version = plugin_info.get(self.api_version,
                                              "pluginVersion")

        workflow_info = self._read_ini("%s/workflow.ini" % defaults_dir)
        self.workflow_version = workflow_info.get(self.api_version,
                                                  "workflowVersion")

        # License/header text prepended to every generated file.
        with open("%s/vro/__code_header" % self._output, "r") as f:
            self.header_content = f.read()
コード例 #59
0
ファイル: _display.py プロジェクト: kdavies4/ModelicaRes
    def find(self, dimension):
        """Return the display unit for a particular dimension.

        **Parameters:**

        - *dimension*: Dictionary of base dimensions and exponents

        **Returns:** The display unit as a string
        """
        factors = []
        for base, exp in dimension.items():
            unit = self[base][1]
            # An exponent of 1 is implicit; otherwise append it to the unit.
            factors.append(unit if exp == 1 else unit + str(exp))
        return '.'.join(factors)

# Load the default display units.
try:
    # Python 3 configparser accepts these keyword arguments; interpolation is
    # disabled and ';' marks inline comments in display.ini.
    config = RawConfigParser(interpolation=None,
                             inline_comment_prefixes=[';'])
except TypeError:
    # Older parsers (e.g. Python 2's RawConfigParser) reject the keywords.
    config = RawConfigParser()
config.optionxform = str  # Dimensions are case sensitive.
config.read(path.join(dname, 'display.ini'))
default_display_units = DisplayUnits(config.items('Default display units'))


if __name__ == '__main__':
    # Self-test: run the examples embedded in this module's docstrings.
    from doctest import testmod

    testmod()
コード例 #60
0
{{ agent }}.sinks.{{ name }}_sink.hdfs.minBlockReplicas = 1
{{ agent }}.sinks.{{ name }}_sink.hdfs.rollInterval = 0
{{ agent }}.sinks.{{ name }}_sink.hdfs.rollSize = 0 
{{ agent }}.sinks.{{ name }}_sink.hdfs.rollCount = 0
{{ agent }}.sinks.{{ name }}_sink.hdfs.idleTimeout = 5
{{ agent }}.sinks.{{ name }}_sink.hdfs.filePrefix = %{basename}

{{ agent }}.channels.{{ name }}_ch.type = memory
###############################################################
###############################################################
{% endfor %}
'''
# Pick the INI file to render: first CLI argument, or the bundled default.
if len(argv) < 2:
    print('#Set Config File to flume_source_sink.ini')
    config = 'conf_template/flume_source_sink.ini'
else:
    config = argv[1]

config_info = RawConfigParser()
config_info.optionxform = str  # keep option names case sensitive

config_info.read(config)

# Every non-DEFAULT section becomes one source/sink entry for the template.
dirdict = {}
for section, options in dict(config_info).items():
    if section != 'DEFAULT':
        dirdict[section] = dict(options)

agent = config_info['DEFAULT']['agent']
flume_template = Template(text)
rt = flume_template.render(agent=agent, dirdict=dirdict)
stdout.write(rt)