def install_mercurial_hook():
    """Register the codestyle precommit hook in the repository's hgrc.

    Ensures a [hooks] section exists in .hg/hgrc and points
    pretxncommit.precommit at the mercurial_hook function of
    scripts/codestyleprecommit.py.
    """
    repo_dir = get_repo_dir()
    hgrc_path = os.path.join(repo_dir, '.hg', 'hgrc')
    parser = RawConfigParser()
    parser.read(hgrc_path)
    hook_script = os.path.join(repo_dir, 'scripts', 'codestyleprecommit.py')
    if not parser.has_section('hooks'):
        parser.add_section('hooks')
    parser.set('hooks', 'pretxncommit.precommit',
               'python:%s:mercurial_hook' % hook_script)
    with open(hgrc_path, 'w') as hgrc:
        parser.write(hgrc)
class OriginAuthStore(object):
    """Stores per-origin credentials in an INI file, one section per origin."""

    def __init__(self, config_file):
        # A missing file is fine: RawConfigParser.read() silently skips
        # paths it cannot open.
        self.config_file = config_file
        self.config = RawConfigParser()
        self.config.read(config_file)

    def origin(self, name):
        # Return a handle object wrapping a single origin's auth data.
        return OriginAuth(self, name)

    def __getitem__(self, origin_name):
        # The whole section as a plain dict; empty dict for unknown origins.
        try:
            return dict(self.config.items(origin_name))
        except NoSectionError:
            return {}

    def __setitem__(self, origin_name, auth):
        # Replace the section wholesale; a falsy `auth` removes it entirely.
        try:
            self.config.remove_section(origin_name)
        except NoSectionError:
            pass
        if auth:
            self.config.add_section(origin_name)
            for key, val in auth.iteritems():  # Python 2
                self.config.set(origin_name, key, val)
        with open(self.config_file, 'w') as f:
            self.config.write(f)
        # Credentials file: best-effort restriction to owner read/write.
        try:
            os.chmod(self.config_file, stat.S_IRUSR | stat.S_IWUSR)
        except OSError:
            print 'Unable to chmod 600 %s' % self.config_file  # TODO: Test
def test_write_yes_map():
    """A 'map writable' entry redirects write access to the mapped repo."""
    config = RawConfigParser()
    section = 'group fooers'
    config.add_section(section)
    config.set(section, 'members', 'jdoe')
    config.set(section, 'map writable foo/bar', 'quux/thud')
    result = access.haveAccess(
        config=config, user='******', mode='writable', path='foo/bar')
    eq(result, ('repositories', 'quux/thud'))
def test_valid_rule(self):
    """Checks that rules are parsed correctly"""
    config = RawConfigParser()
    config.add_section("rule1")
    rule1_options = [
        ("check", "valid_checker"),
        ("check.param_true", "true"),
        ("check.param_number", "1337"),
        ("check.param_str", '"foobar"'),
        ("check.param_obj", '{"foo":"bar"}'),
        ("check.param_arr", '[true, 1337, ["foobar"]]'),
        ("filter_pattern", "foo"),
        ("filter_field", "bar"),
        ("filter_collection", "baz"),
        ("holdingpen", "true"),
    ]
    for option, raw_value in rule1_options:
        config.set("rule1", option, raw_value)
    config.add_section("rule2")
    config.set("rule2", "check", "other_checker")

    rule1 = load_rule(config, PLUGINS_MOCK, "rule1")
    rule2 = load_rule(config, PLUGINS_MOCK, "rule2")

    # Checker parameters are JSON-decoded into native types.
    self.assertEqual(rule1["check"], "valid_checker")
    self.assertTrue(rule1["checker_params"]["param_true"])
    self.assertEqual(rule1["checker_params"]["param_number"], 1337)
    self.assertEqual(rule1["checker_params"]["param_str"], "foobar")
    self.assertEqual(rule1["checker_params"]["param_obj"], {"foo": "bar"})
    self.assertEqual(rule1["checker_params"]["param_arr"],
                     [True, 1337, ["foobar"]])
    self.assertEqual(rule1["filter_pattern"], "foo")
    self.assertEqual(rule1["filter_field"], "bar")
    self.assertEqual(rule1["filter_collection"], "baz")
    self.assertEqual(rule1["holdingpen"], True)
    self.assertEqual(rule2["check"], "other_checker")
def test_read_yes_all():
    """@all in members grants the configured access to every user."""
    config = RawConfigParser()
    config.add_section('group fooers')
    config.set('group fooers', 'members', '@all')
    config.set('group fooers', 'readonly', 'foo/bar')
    result = access.haveAccess(
        config=config, user='******', mode='readonly', path='foo/bar')
    eq(result, ('repositories', 'foo/bar'))
# NOTE(review): this class's interactive credential-prompt section is
# corrupted/masked in the source ('******' replaces chunks of code around the
# raw_input/getpass calls), so its original statement structure cannot be
# reconstructed safely. Kept verbatim; restore from version control before
# editing. The readable intent: load ~-expanded config if present, otherwise
# prompt for username/password (with confirmation loop), host and path, write
# the file, and chmod it 0600 so only the owner can read the password.
class Configuration(object): def __init__(self, filename): self.parser = RawConfigParser() filepath = os.path.expanduser(filename) if os.path.exists(filepath): self.parser.read(filepath) else: # Set up config file self.parser.add_section('jira_default') self.parser.set('jira_default', 'username', raw_input('username: '******'password: '******'confirm: ') if passwd != passwd2: print 'passwords do not match.' else: passwd_confirmed = True self.parser.set('jira_default', 'password', passwd) self.parser.set('jira_default', 'host', raw_input('host (e.g jira.atlassian.com): ')) self.parser.set('jira_default', 'path', '/rest/api/latest') # Color-coded statuses #self.parser.set('colors', 'Resolved', 'green') #self.parser.set('colors', 'In Progress', 'magenta') f = open(filepath, 'w') self.parser.write(f) os.chmod(filepath, 0600) #Only user can read this
def test_push_inits_no_stdout_spam():
    """A push that auto-creates the repository must print nothing to stdout."""
    # git init has a tendency to spew to stdout, and that confuses
    # e.g. a git push
    tmp = util.maketemp()
    cfg = RawConfigParser()
    cfg.add_section('gitosis')
    repositories = os.path.join(tmp, 'repositories')
    os.mkdir(repositories)
    cfg.set('gitosis', 'repositories', repositories)
    generated = os.path.join(tmp, 'generated')
    os.mkdir(generated)
    cfg.set('gitosis', 'generate-files-in', generated)
    cfg.add_section('group foo')
    cfg.set('group foo', 'members', 'jdoe')
    cfg.set('group foo', 'writable', 'foo')
    # Redirect fd 1 into a temp file so any stray output is captured.
    old_stdout = os.dup(1)
    try:
        new_stdout = os.tmpfile()  # Python 2 only API
        os.dup2(new_stdout.fileno(), 1)
        serve.serve(
            cfg=cfg,
            user='******',
            command="git-receive-pack 'foo'",
        )
    finally:
        # Always restore the real stdout, even if serve() raises.
        os.dup2(old_stdout, 1)
        os.close(old_stdout)
    new_stdout.seek(0)
    got = new_stdout.read()
    new_stdout.close()
    # Nothing printed, and the bare repository was created.
    eq(got, '')
    eq(os.listdir(repositories), ['foo.git'])
    assert os.path.isfile(os.path.join(repositories, 'foo.git', 'HEAD'))
def test_push_inits_subdir_parent_exists():
    """Creating foo/bar.git must not disturb an existing parent dir 'foo'."""
    tmp = util.maketemp()
    cfg = RawConfigParser()
    cfg.add_section('gitosis')
    repositories = os.path.join(tmp, 'repositories')
    os.mkdir(repositories)
    foo = os.path.join(repositories, 'foo')
    # silly mode on purpose; not to be touched
    os.mkdir(foo, 0751)  # Python 2 octal literal
    cfg.set('gitosis', 'repositories', repositories)
    generated = os.path.join(tmp, 'generated')
    os.mkdir(generated)
    cfg.set('gitosis', 'generate-files-in', generated)
    cfg.add_section('group foo')
    cfg.set('group foo', 'members', 'jdoe')
    cfg.set('group foo', 'writable', 'foo/bar')
    serve.serve(
        cfg=cfg,
        user='******',
        command="git-receive-pack 'foo/bar.git'",
    )
    # The parent keeps its deliberately odd mode; bar.git appears inside it.
    eq(os.listdir(repositories), ['foo'])
    util.check_mode(foo, 0751, is_dir=True)
    eq(os.listdir(foo), ['bar.git'])
    assert os.path.isfile(os.path.join(repositories, 'foo', 'bar.git', 'HEAD'))
def add_backend(self, backend_name, module_name, params, edit=False):
    """
    Add a backend to config.

    :param backend_name: name of the backend in config
    :param module_name: name of the Python submodule to run
    :param params: params to pass to the module
    :type params: :class:`dict`
    :param edit: when True, update an existing section instead of adding one
    :raises ValueError: if backend_name is empty
    :raises BackendAlreadyExists: if the section already exists and edit=False
    """
    if not backend_name:
        raise ValueError(u'Please give a name to the configured backend.')
    # Re-read the file each time so concurrent edits are not clobbered.
    config = RawConfigParser()
    config.read(self.confpath)
    if not edit:
        try:
            config.add_section(backend_name)
        except DuplicateSectionError:
            raise BackendAlreadyExists(backend_name)
    config.set(backend_name, '_module', module_name)
    for key, value in params.iteritems():  # Python 2
        # Encode unicode values before handing them to ConfigParser.
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        config.set(backend_name, key, value)
    with open(self.confpath, 'wb') as f:
        config.write(f)
def test_write_no_simple_wouldHaveReadonly():
    """Readonly access alone must not grant write access."""
    config = RawConfigParser()
    config.add_section('group fooers')
    config.set('group fooers', 'members', 'jdoe')
    config.set('group fooers', 'readonly', 'foo/bar')
    result = access.haveAccess(
        config=config, user='******', mode='writable', path='foo/bar')
    eq(result, None)
def test_no_notListed():
    """A user not listed in any group still belongs to the implicit 'all'."""
    cfg = RawConfigParser()
    cfg.add_section('group hackers')
    cfg.set('group hackers', 'members', 'wsmith')
    gen = group.getMembership(config=cfg, user='******')
    eq(gen.next(), 'all')  # Python 2 generator API
    # And nothing else is yielded.
    assert_raises(StopIteration, gen.next)
def create_config_file(config_file, random_music_home):
    """
    Create a configuration file.

    :param config_file: path to config file we are creating
    :type config_file: str
    :param random_music_home: home of random_music application (i.e. where
        index files are stored
    :type random_music_home: str
    """
    sys.stdout.write("You do not appear to have a config file, lets create one!\n")
    sys.stdout.write("Creating config file at %s\n" % config_file)
    config = RawConfigParser()
    config.add_section('config')
    config.set('config', 'loop_songs', 'true')
    config.set('config', 'randomise', 'true')
    config.set('config', 'index_dir', os.path.join(random_music_home, "indicies"))
    # Keep prompting until a player that exists on PATH is given.
    music_client = DEFAULT_MUSIC_CLIENT
    while not which(music_client):
        music_client = raw_input("The music player '%s' could not be found "
                                 "on your path. Please input a different "
                                 "music player:" % music_client)  # Python 2
    config.set('config', 'music_client', music_client)
    # "".split(",") == [""], so isdir fails and the prompt runs at least once.
    user_music_dirs = ""
    while not all([os.path.isdir(d) for d in user_music_dirs.split(",")]):
        user_music_dirs = raw_input("Input a csv list of full paths to "
                                    "your music dirs:")
    config.set('config', 'music_dirs', user_music_dirs)
    with open(config_file, 'wb') as fh:
        config.write(fh)
def parse_probe(probename, probe, parentname, parent):
    """Recursively build a ProbeSpec from a raw probe mapping.

    :param probename: key of this probe in its parent; the leading character
        (a "/" path marker) is stripped for the registered name
    :param probe: mutable mapping describing the probe; consumed via pop()
    :param parentname: path prefix of the enclosing probe
    :param parent: dict the resulting ProbeSpec is registered into
    """
    probe_path = parentname + probename
    probename = probename[1:]  # drop the leading "/" marker
    probe_type = probe.pop("probe type", None)
    # (Fixed: a redundant RawConfigParser was created here and immediately
    # discarded before the policy parser below — dead store removed.)

    # probe metadata: plain str->str dict
    if "metadata" in probe:
        probe_metadata = dict()
        for k, v in probe.pop("metadata").items():
            probe_metadata[str(k)] = str(v)
    else:
        probe_metadata = dict()

    # probe policy
    options = RawConfigParser()
    options.add_section("policy")
    for k, v in probe.pop("policy", dict()).items():
        options.set("policy", str(k), str(v))
    probe_policy = Section("policy", options, os.path.dirname(systemfile.path))

    # probe settings: every remaining key that is not a "/"-prefixed child
    options = RawConfigParser()
    options.add_section("settings")
    for k in filter(lambda k: not k.startswith("/"), probe.keys()):
        options.set("settings", str(k), str(probe.pop(k)))
    probe_settings = Section("settings", options, os.path.dirname(systemfile.path))

    # probe children: whatever is left all starts with "/"
    children = dict()
    for childname, childprobe in probe.items():
        parse_probe(childname, childprobe, probe_path, children)

    parent[probename] = ProbeSpec(probe_path, probe_type, probe_settings,
                                  probe_metadata, probe_policy, children)
def write(self, config_data, filepath=None):
    """
    Create a dotfile from keyword arguments.

    :param config_data: Dict of config settings
    :param filepath: (Optional) Path to write
    """
    if filepath is None:
        filepath = self.filepath
    config = RawConfigParser()
    section = constants.SECTION_NAME
    config.add_section(section)
    # Set the config settings
    for key, val in config_data.iteritems():  # Python 2
        config.set(section, key, val)
    with open(filepath, 'wb') as dotfile:
        config.write(dotfile)
    # Tighten file permissions after writing.
    self.enforce_perms()
    log.debug('wrote %s' % filepath)
def test_push_inits_no_stdout_spam():
    """A push that auto-creates the repository must print nothing to stdout."""
    # git init has a tendency to spew to stdout, and that confuses
    # e.g. a git push
    tmp = util.maketemp()
    cfg = RawConfigParser()
    cfg.add_section("gitosis")
    repositories = os.path.join(tmp, "repositories")
    os.mkdir(repositories)
    cfg.set("gitosis", "repositories", repositories)
    generated = os.path.join(tmp, "generated")
    os.mkdir(generated)
    cfg.set("gitosis", "generate-files-in", generated)
    cfg.add_section("group foo")
    cfg.set("group foo", "members", "jdoe")
    cfg.set("group foo", "writable", "foo")
    # Redirect fd 1 into a temp file so any stray output is captured.
    old_stdout = os.dup(1)
    try:
        new_stdout = os.tmpfile()  # Python 2 only API
        os.dup2(new_stdout.fileno(), 1)
        serve.serve(cfg=cfg, user="******", command="git-receive-pack 'foo'")
    finally:
        # Always restore the real stdout, even if serve() raises.
        os.dup2(old_stdout, 1)
        os.close(old_stdout)
    new_stdout.seek(0)
    got = new_stdout.read()
    new_stdout.close()
    eq(got, "")
    eq(os.listdir(repositories), ["foo.git"])
    assert os.path.isfile(os.path.join(repositories, "foo.git", "HEAD"))
def install_mercurial_hook():
    """Register the codestyle precommit hook in the repository's hgrc.

    Ensures a [hooks] section exists in .hg/hgrc and points
    pretxncommit.precommit at the mercurial_hook function of
    scripts/codestyleprecommit.py.
    """
    repo_dir = get_repo_dir()
    hgrc_path = os.path.join(repo_dir, ".hg", "hgrc")
    parser = RawConfigParser()
    parser.read(hgrc_path)
    hook_script = os.path.join(repo_dir, "scripts", "codestyleprecommit.py")
    if not parser.has_section("hooks"):
        parser.add_section("hooks")
    parser.set("hooks", "pretxncommit.precommit",
               "python:%s:mercurial_hook" % hook_script)
    with open(hgrc_path, "w") as hgrc:
        parser.write(hgrc)
def next_serial(serial_file=CA_SERIALNO):
    """
    Get the next serial number if we're using an NSS-based self-signed CA.

    The file is an ini-like file with following properties:
        lastvalue = the last serial number handed out
        nextreplica = the serial number the next replica should start with
        replicainterval = the number to add to nextreplica the next time a
                          replica is created

    File locking is attempted so we have unique serial numbers.
    """
    fp = None
    parser = RawConfigParser()
    if ipautil.file_exists(serial_file):
        try:
            # Lock the file exclusively while reading so two callers cannot
            # hand out the same serial.
            fp = open(serial_file, "r+")
            fcntl.flock(fp.fileno(), fcntl.LOCK_EX)
            parser.readfp(fp)
            serial = parser.getint('selfsign', 'lastvalue')
            cur_serial = serial + 1
        except IOError, e:  # Python 2 except syntax
            raise RuntimeError("Unable to determine serial number: %s" % str(e))
        except MissingSectionHeaderError:
            # Legacy format: the file holds a bare integer rather than an
            # ini section; read it directly and migrate to the ini layout.
            fcntl.flock(fp.fileno(), fcntl.LOCK_UN)
            fp.close()
            f = open(serial_file, "r")
            r = f.readline()
            f.close()
            cur_serial = int(r) + 1
            fp = open(serial_file, "w")
            fcntl.flock(fp.fileno(), fcntl.LOCK_EX)
            parser.add_section('selfsign')
            parser.set('selfsign', 'nextreplica', 500000)
            parser.set('selfsign', 'replicainterval', 500000)
    # NOTE(review): the function appears truncated in this chunk — the
    # write-back of 'lastvalue' and the return of cur_serial are not visible.
def test_typo_writeable():
    """The 'writeable' typo still grants access but logs a warning."""
    tmp = util.maketemp()
    repository.init(os.path.join(tmp, 'foo.git'))
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', tmp)
    config.add_section('group foo')
    config.set('group foo', 'members', 'jdoe')
    config.set('group foo', 'writeable', 'foo')
    logger = logging.getLogger('gitosis.serve')
    captured = StringIO()
    capture_handler = logging.StreamHandler(captured)
    logger.addHandler(capture_handler)
    try:
        got = serve.serve(
            cfg=config,
            user='******',
            command="git-receive-pack 'foo'",
        )
    finally:
        logger.removeHandler(capture_handler)
    eq(got, "git-receive-pack '%s/foo.git'" % tmp)
    capture_handler.flush()
    eq(
        captured.getvalue(),
        "Repository 'foo' config has typo \"writeable\", shou"
        + "ld be \"writable\"\n",
    )
def test_user():
    """A 'user NAME' section grants access directly, without a group."""
    config = RawConfigParser()
    config.add_section('user jdoe')
    config.set('user jdoe', 'readonly', 'foo xyzzy bar')
    result = access.haveAccess(
        config=config, user='******', mode='readonly', path='xyzzy')
    eq(result, ('repositories', 'xyzzy'))
class ApplicationConfig(object):
    """A thin wrapper around ConfigParser that remembers what we read.

    The remembered settings can then be written out to a minimal config file
    when building the Elastic Beanstalk zipfile.
    """

    def __init__(self):
        self.input = RawConfigParser()
        with open("production.ini") as f:
            self.input.readfp(f)
        # Accumulates only the settings actually requested via get().
        self.output = RawConfigParser()

    def get(self, section, key):
        value = self.input.get(section, key)
        # remember that we needed this configuration value
        if (section.upper() != "DEFAULT" and
                not self.output.has_section(section)):
            self.output.add_section(section)
        self.output.set(section, key, value)
        return value

    def to_config(self):
        # Render the remembered subset as ini-format text.
        io = cStringIO.StringIO()  # Python 2
        self.output.write(io)
        return io.getvalue()
def test_read_yes_map_wouldHaveWritable():
    """A writable mapping must not leak read access."""
    config = RawConfigParser()
    config.add_section('group fooers')
    config.set('group fooers', 'members', 'jdoe')
    config.set('group fooers', 'map writable foo/bar', 'quux/thud')
    result = access.haveAccess(
        config=config, user='******', mode='readonly', path='foo/bar')
    eq(result, None)
# NOTE(review): this function's newline/indentation structure was lost in the
# source (it is collapsed onto long lines, and a multi-line interactive help
# string spans the break in the middle). Kept verbatim rather than guessing at
# the original layout; restore from version control before editing. Readable
# intent: assemble the ordered list of config sources (built-in defaults,
# host/user .cnf, ~/.shutit config, configs/build.cnf, any --config files),
# optionally show them interactively, materialize config_overrides into an
# in-memory ini via RawConfigParser, then parse everything via get_configs()
# and populate cfg via get_base_config().
def load_configs(shutit): """Responsible for loading config files into ShutIt. Recurses down from configured shutit module paths. """ cfg = shutit.cfg # Get root default config. configs = [('defaults', StringIO.StringIO(_default_cnf))] # Add the shutit global host- and user-specific config file. configs.append(os.path.join(shutit.shutit_main_dir, 'configs/' + socket.gethostname() + '_' + cfg['host']['real_user'] + '.cnf')) configs.append(os.path.join(cfg['shutit_home'], 'config')) # Add the local build.cnf configs.append('configs/build.cnf') # Get passed-in config(s) for config_file_name in cfg['build']['extra_configs']: run_config_file = os.path.expanduser(config_file_name) if not os.path.isfile(run_config_file): print('Did not recognise ' + run_config_file + ' as a file - do you need to touch ' + run_config_file + '?') sys.exit() configs.append(run_config_file) # Image to use to start off. The script should be idempotent, so running it # on an already built image should be ok, and is advised to reduce diff space required. if cfg['build']['interactive'] >= 3 or cfg['action']['show_config']: msg = '' for c in configs: if type(c) is tuple: c = c[0] msg = msg + '\t\n' + c shutit.log('\t' + c) if cfg['build']['interactive'] >= 3: print textwrap.dedent("""\n""") + msg + textwrap.dedent(""" Looking at config files in the above order (even if they do not exist - you may want to create them). 
If you get a "Port already in use:" error, run: docker ps -a | grep -w <port> | awk '{print $1}' | xargs docker kill or sudo docker ps -a | grep -w <port> | awk '{print $1}' | xargs sudo docker kill """ + colour('31','[Hit return to continue]')) raw_input('') # Interpret any config overrides, write to a file and add them to the # list of configs to be interpreted if cfg['build']['config_overrides']: # We don't need layers, this is a temporary configparser override_cp = RawConfigParser() for o_sec, o_key, o_val in cfg['build']['config_overrides']: if not override_cp.has_section(o_sec): override_cp.add_section(o_sec) override_cp.set(o_sec, o_key, o_val) override_fd = StringIO.StringIO() override_cp.write(override_fd) override_fd.seek(0) configs.append(('overrides', override_fd)) cfg_parser = get_configs(shutit,configs) get_base_config(cfg, cfg_parser)
def test_init_yes_simple():
    """An 'init' grant yields the three-element tuple including the mode."""
    config = RawConfigParser()
    config.add_section('group fooers')
    config.set('group fooers', 'members', 'jdoe')
    config.set('group fooers', 'init', 'foo/bar')
    result = access.haveAccess(
        config=config, user='******', mode='init', path='foo/bar')
    eq(result, ('repositories', 'foo/bar', 'init'))
def create_new_conf_from_modifications(self):
    """
    Return a new RawConfigParser instance that has been created from
    the non-default modifications returned by the `modifications`
    property above.

    :raises RepositoryNotSet: if no repository path is configured
    :raises NoModificationsMade: if there are no modifications to write
    """
    # This is a bit hacky as the underlying config classes don't really
    # support the notion of "only write out sections/options that have
    # changed since we loaded the defaults".
    if not self.repo_path:
        raise RepositoryNotSet()
    mods = self.modifications
    if not mods:
        raise NoModificationsMade()
    filename = self.writable_repo_override_conf_filename
    conf = RawConfigParser()
    conf.read(filename)
    for (section, options) in mods.items():
        # Bug fix: add_section() raises DuplicateSectionError when the
        # section is already present in the override file we just read;
        # only add sections that are genuinely new.
        if not conf.has_section(section):
            conf.add_section(section)
        for (option, value) in options.items():
            conf.set(section, option, value)
    return conf
def test_imap_config_values_should_be_stored():
    """IMAP mailbox options are serialized into the account's config section."""
    manager = AccountManager()
    spec = get_mailbox_parameter_specs('imap')
    options = {
        'user': '******',
        'password': '',
        'server': 'imap.example.org',
        'port': '',
        'ssl': True,
        'imap': True,
        'idle': True,
        'folders': ['a', 'b'],
    }
    config = RawConfigParser()
    config.add_section('account1')
    manager._set_cfg_options(config, 'account1', options, spec)
    expected = {
        ('user', 'you'),
        ('password', ''),
        ('server', 'imap.example.org'),
        ('port', ''),
        ('ssl', '1'),
        ('imap', '1'),
        ('idle', '1'),
        ('folder', '["a", "b"]'),
    }
    assert expected == set(config.items('account1'))
def save(self, filename=None):
    """Serialize all sections to an ini file.

    :param filename: destination path; defaults to self.write_path.
        Parent directories are created as needed.
    """
    if filename is None:
        filename = self.write_path
    self.before_save()
    config_parser = RawConfigParser()
    for section_name, section in self.sections.items():
        config_parser.add_section(section_name)
        for item in section.items:
            # Map the declared item type to its string serializer.
            type_process = {
                str: str,
                bool: str,
                int: str,
                float: str,
                "pickle": do_pickling,
            }[section.item_types[item]]
            # look it up now. If this is a lazily evaluated item, find its
            # value before we close
            # TODO: is this what we really want to do?
            value = section[item]
            config_parser.set(section_name, item, type_process(value))
    directory = os.path.dirname(filename)
    if not os.path.exists(directory):
        os.makedirs(directory)
    # Bug fix: the handle was previously opened inline and never closed;
    # a context manager guarantees flush/close even on error.
    with open(filename, "w") as fh:
        config_parser.write(fh)
def save(self, filename, private=False):
    """
    Save repository into a file (modules.list for example).

    :param filename: path to file to save repository.
    :type filename: str
    :param private: if enabled, save URL of repository.
    :type private: bool
    """
    config = RawConfigParser()
    # Repository-level attributes go in the DEFAULT section.
    config.set(DEFAULTSECT, 'name', self.name)
    config.set(DEFAULTSECT, 'update', self.update)
    config.set(DEFAULTSECT, 'maintainer', self.maintainer)
    config.set(DEFAULTSECT, 'signed', int(self.signed))
    config.set(DEFAULTSECT, 'key_update', self.key_update)
    if private:
        config.set(DEFAULTSECT, 'url', self.url)
    # One section per module, with UTF-8 encoded values (Python 2).
    for module in self.modules.itervalues():
        config.add_section(module.name)
        for key, value in module.dump():
            config.set(module.name, key, to_unicode(value).encode('utf-8'))
    with open(filename, 'wb') as f:
        config.write(f)
def save(self, filename, private=False):
    """
    Save repository into a file (modules.list for example).

    :param filename: path to file to save repository.
    :type filename: str
    :param private: if enabled, save URL of repository.
    :type private: bool
    """
    config = RawConfigParser()
    # Repository-level attributes go in the DEFAULT section.
    config.set(DEFAULTSECT, 'name', self.name)
    config.set(DEFAULTSECT, 'update', self.update)
    config.set(DEFAULTSECT, 'maintainer', self.maintainer)
    config.set(DEFAULTSECT, 'signed', int(self.signed))
    config.set(DEFAULTSECT, 'key_update', self.key_update)
    if private:
        config.set(DEFAULTSECT, 'url', self.url)
    # One section per module; value encoding differs between Python 2 and 3.
    for module in self.modules.values():
        config.add_section(module.name)
        for key, value in module.dump():
            if sys.version_info.major == 2:
                # python2's configparser enforces bytes coercion with str(value)...
                config.set(module.name, key, to_unicode(value).encode('utf-8'))
            else:
                config.set(module.name, key, value)
    with open_for_config(filename) as f:
        config.write(f)
def save():
    """Saves FileDirectives into ConfigFile.

    Optional directives are only written when they differ from their
    defaults. All failures are swallowed deliberately: saving config is
    best-effort and must never crash the application.
    """
    section = "*"
    module = sys.modules[__name__]
    parser = RawConfigParser()
    parser.optionxform = str  # Force case-sensitivity on names
    parser.add_section(section)
    try:
        fname = util.longpath(ConfigFile)
        # Bug fix: the file object was opened inline and leaked if any step
        # before close() raised; a context manager guarantees it is closed.
        with open(ConfigFile, "wb") as f:
            f.write("# %s configuration autowritten on %s.\n" % (fname,
                    datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
            for name in FileDirectives:
                try:
                    parser.set(section, name, json.dumps(getattr(module, name)))
                except Exception:
                    pass
            for name in OptionalFileDirectives:
                try:
                    value = getattr(module, name, None)
                    if OptionalFileDirectiveDefaults.get(name) != value:
                        parser.set(section, name, json.dumps(value))
                except Exception:
                    pass
            parser.write(f)
    except Exception:
        pass  # Fail silently
def getAccount(account_key):
    """Return (account, password) for *account_key* from the .account file.

    The password is stored AES-encrypted (keyed on the account string) under
    a [password] section; when absent it is prompted for, encrypted, and
    written back for next time.
    """
    from ConfigParser import RawConfigParser, NoOptionError, NoSectionError  # Python 2
    account_file = '.account'
    config = RawConfigParser()
    with open(account_file, 'r') as fp:
        config.readfp(fp)
    account = config.get('account', account_key)
    password = None
    password_section = 'password'
    try:
        password = config.get(password_section, account_key)
    except NoSectionError:
        # First run: create the section so set() below succeeds.
        config.add_section(password_section)
    except NoOptionError:
        pass
    aes = AESCipher(account)
    if password:
        # Cached path: decrypt and return immediately.
        return account, aes.decrypt(password).encode('UTF-8')
    from getpass import getpass
    password = getpass(account_key + ' of ' + account + "'s password: ")
    # Persist the freshly entered password (encrypted) for next time.
    config.set(password_section, account_key, aes.encrypt(password))
    with open(account_file, 'w') as fp:
        config.write(fp)
    return account, password
def test_base_global_relative_simple():
    """A relative repositories base is passed through untouched."""
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', 'some/relative/path')
    config.add_section('group fooers')
    config.set('group fooers', 'members', 'jdoe')
    config.set('group fooers', 'readonly', 'foo xyzzy bar')
    # Both spellings of read access resolve identically.
    for mode in ('readonly', 'read'):
        eq(
            access.haveAccess(config=config, user='******',
                              mode=mode, path='xyzzy'),
            ('some/relative/path', 'xyzzy', 'read'))
def execute(self, instance):
    """Write the object's list elements to the INI file named by parameter 0.

    Produces a [List] section with an element count and one numbered key per
    element. Silently does nothing if the file cannot be opened.
    """
    target = self.evaluate_expression(self.get_parameter(0))
    try:
        handle = open_file(target, 'wb')
    except IOError:
        return
    parser = RawConfigParser()
    parser.optionxform = str  # keep key case as written
    elements = instance.objectPlayer.split()
    parser.add_section('List')
    parser.set('List', 'Elements', str(len(elements)))
    for index, element in enumerate(elements, 1):
        parser.set('List', 'Element%s' % index, element)
    parser.write(handle)
    handle.close()
def test_bad_forbiddenCommand_write_readAccess_dash():
    """A read-only user pushing must get WriteAccessDenied."""
    config = RawConfigParser()
    config.add_section('group foo')
    config.set('group foo', 'members', 'jdoe')
    config.set('group foo', 'readonly', 'foo')
    e = assert_raises(
        serve.WriteAccessDenied,
        serve.serve,
        cfg=config,
        user='******',
        command="git-receive-pack 'foo'",
    )
    eq(str(e), 'Repository write access denied')
    # The exception participates in the serving-error hierarchy.
    for base in (serve.AccessDenied, serve.ServingError):
        assert isinstance(e, base)
def test_simple_write_space():
    """The space-separated 'git receive-pack' form is rewritten correctly."""
    tmp = util.maketemp()
    repository.init(os.path.join(tmp, 'foo.git'))
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', tmp)
    config.add_section('group foo')
    config.set('group foo', 'members', 'jdoe')
    config.set('group foo', 'writable', 'foo')
    result = serve.serve(
        cfg=config,
        user='******',
        command="git receive-pack 'foo'",
    )
    eq(result, "git receive-pack '%s/foo.git'" % tmp)
def setUp(self):
    """Write tld/skiplist fixture files and build the URIExtract candidate."""
    section = "URIExtract"
    with open('/tmp/tld.txt', 'w') as handle:
        handle.write("com net org\n .co.uk ch ru")
    with open('/tmp/domainskiplist.txt', 'w') as handle:
        handle.write("skipme.com meetoo.com")
    config = RawConfigParser()
    config.add_section(section)
    config.set(section, 'tldfiles', "/tmp/tld.txt")
    config.set(section, 'domainskiplist', "/tmp/domainskiplist.txt")
    self.candidate = URIExtract(config, section)
    self.candidate._prepare()
def test_projectsList_path():
    """generate_project_list writes gitweb-enabled repos to the given path."""
    tmp = maketemp()
    mkdir(os.path.join(tmp, 'foo.git'))
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', tmp)
    config.add_section('repo foo')
    config.set('repo foo', 'gitweb', 'yes')
    projects_list = os.path.join(tmp, 'projects.list')
    gitweb.generate_project_list(config=config, path=projects_list)
    eq(readFile(projects_list), 'foo.git\n')
def test_description_not_set():
    """Without a configured description, an existing one is left alone."""
    tmp = maketemp()
    path = os.path.join(tmp, 'foo.git')
    mkdir(path)
    description_file = os.path.join(path, 'description')
    writeFile(description_file, 'i was here first\n')
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', tmp)
    config.add_section('repo foo')
    gitweb.set_descriptions(config=config)
    eq(readFile(description_file), 'i was here first\n')
def test_write_no_simple_wouldHaveReadonly():
    """Readonly access must not grant either spelling of write access."""
    config = RawConfigParser()
    config.add_section('group fooers')
    config.set('group fooers', 'members', 'jdoe')
    config.set('group fooers', 'readonly', 'foo/bar')
    for mode in ('writable', 'write'):
        eq(
            access.haveAccess(config=config, user='******',
                              mode=mode, path='foo/bar'),
            None)
def test_simple_read_archive():
    """'git upload-archive' counts as read access and is rewritten."""
    tmp = util.maketemp()
    repository.init(os.path.join(tmp, 'foo.git'))
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', tmp)
    config.add_section('group foo')
    config.set('group foo', 'members', 'jdoe')
    config.set('group foo', 'readonly', 'foo')
    result = serve.serve(
        cfg=config,
        user='******',
        command="git upload-archive 'foo'",
    )
    eq(result, "git upload-archive '%s/foo.git'" % tmp)
def export_ini(self):
    """ Exports the current configuration to an ini file format """
    contents = RawConfigParser()
    data = self.export()
    # Python 2: keys() returns a list, so remove() works in place. 'global'
    # is always written first, then the remaining sections sorted.
    sections = data.keys()
    sections.remove('global')
    for section in ['global'] + sorted(sections):
        contents.add_section(section)
        for item in sorted(data[section]):
            contents.set(section, item, data[section][item])
    # Render to a string buffer rather than a file.
    config_io = StringIO()
    contents.write(config_io)
    return str(config_io.getvalue())
def convert_config_to_tribler71(current_config, state_dir=None):
    """
    Convert the Config files libtribler.conf and tribler.conf to the newer
    triblerd.conf and cleanup the files when we are done.

    :param current_config: the current config in which we merge the old config files.
    :param state_dir: optional state directory; defaults to TriblerConfig's.
    :return: the newly edited TriblerConfig object with the old data inserted.
    """
    state_dir = state_dir or TriblerConfig.get_default_state_dir()
    libtribler_file_loc = os.path.join(state_dir, "libtribler.conf")
    if os.path.exists(libtribler_file_loc):
        libtribler_cfg = RawConfigParser()
        libtribler_cfg.read(libtribler_file_loc)
        current_config = add_libtribler_config(current_config, libtribler_cfg)
        os.remove(libtribler_file_loc)
    tribler_file_loc = os.path.join(state_dir, "tribler.conf")
    if os.path.exists(tribler_file_loc):
        tribler_cfg = RawConfigParser()
        tribler_cfg.read(tribler_file_loc)
        current_config = add_tribler_config(current_config, tribler_cfg)
        os.remove(tribler_file_loc)

    # We also have to update all existing downloads, in particular, rename the
    # section 'downloadconfig' to 'download_defaults'.
    # (Fixed: a pointless enumerate() previously discarded the index into '_'.)
    for filename in iglob(os.path.join(state_dir, STATEDIR_DLPSTATE_DIR, '*.state')):
        download_cfg = RawConfigParser()
        try:
            with open(filename) as cfg_file:
                download_cfg.readfp(cfg_file, filename=filename)
        except MissingSectionHeaderError:
            logger.error("Removing download state file %s since it appears to be corrupt", filename)
            os.remove(filename)
            # Bug fix: previously we fell through and kept working on the
            # now-deleted file's empty parser; skip to the next file.
            continue
        try:
            download_items = download_cfg.items("downloadconfig")
            download_cfg.add_section("download_defaults")
            for key, value in download_items:
                download_cfg.set("download_defaults", key, value)
            download_cfg.remove_section("downloadconfig")
            with open(filename, "w") as output_config_file:
                download_cfg.write(output_config_file)
        except (NoSectionError, DuplicateSectionError):
            # This item has already been converted
            pass
    return current_config
def save():
    """Saves FileDirectives into ConfigFile."""
    configpaths = [ConfigFile]
    try:
        p = appdirs.user_config_dir(Title, appauthor=False)
        userpath = os.path.join(p, "%s.ini" % Title.lower())
        # Pick only userpath if exists, else try application folder first
        if os.path.isfile(userpath):
            configpaths = [userpath]
        else:
            configpaths.append(userpath)
    except Exception:
        pass
    section = "*"
    module = sys.modules[__name__]
    parser = RawConfigParser()
    parser.optionxform = str  # Force case-sensitivity on names
    parser.add_section(section)
    try:
        # Open the first writable candidate path; later entries are fallbacks.
        for path in configpaths:
            try:
                os.makedirs(os.path.dirname(path))
            except Exception:
                pass
            try:
                f = open(path, "wb")
            except Exception:
                continue  # for path
            else:
                break  # for path
        f.write("# %s configuration written on %s.\n" % (Title,
                datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
        for name in FileDirectives:
            try:
                parser.set(section, name, json.dumps(getattr(module, name)))
            except Exception:
                pass
        # Optional directives are only written when they differ from defaults.
        for name in OptionalFileDirectives:
            try:
                value = getattr(module, name, None)
                if Defaults.get(name) != value:
                    parser.set(section, name, json.dumps(value))
            except Exception:
                pass
        parser.write(f)
        f.close()
    except Exception:
        pass  # Fail silently
def write_config(self, client):
    """
    Writes the configuration down to in the format expected by Arakoon.

    The config is serialized to a local temp file, uploaded through
    *client*, and the temp file is removed afterwards.
    """
    (temp_handle, temp_filename) = tempfile.mkstemp()
    # Bug fix: mkstemp() returns an already-open OS-level file descriptor
    # that was never closed (fd leak); we only need the path.
    os.close(temp_handle)
    contents = RawConfigParser()
    data = self.export()
    for section in data:
        contents.add_section(section)
        for item in data[section]:
            contents.set(section, item, data[section][item])
    with open(temp_filename, 'wb') as config_file:
        contents.write(config_file)
    client.dir_create(self._dir)
    client.file_upload(self._filename, temp_filename)
    os.remove(temp_filename)
def test_description_default():
    """The stock git placeholder description is replaced by the configured one."""
    tmp = maketemp()
    path = os.path.join(tmp, 'foo.git')
    mkdir(path)
    description_file = os.path.join(path, 'description')
    writeFile(
        description_file,
        'Unnamed repository; edit this file to name it for gitweb.\n',
    )
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', tmp)
    config.add_section('repo foo')
    config.set('repo foo', 'description', 'foodesc')
    gitweb.set_descriptions(config=config)
    eq(readFile(description_file), 'foodesc\n')
def add_project(project_name, specfile):
    """Add a project to the global configuration file.

    :param project_name: Name of the project
    :param specfile: path to the data dictionary used by the project.
    :type project_name: str
    :type specfile: str
    :return: None
    """
    config_path = locate_config_file()
    config = RawConfigParser()
    config.read(config_path)
    config.add_section(project_name)
    config.set(project_name, "specfile", specfile)
    with open(config_path, "w") as handle:
        config.write(handle)
def test_projectsList_reallyEndsWithGit():
    """The project list keeps the trailing .git in repository names."""
    tmp = maketemp()
    mkdir(os.path.join(tmp, 'foo.git'))
    config = RawConfigParser()
    config.add_section('gitosis')
    config.set('gitosis', 'repositories', tmp)
    config.add_section('repo foo')
    config.set('repo foo', 'gitweb', 'yes')
    output = StringIO()
    gitweb.generate_project_list_fp(config=config, fp=output)
    eq(output.getvalue(), 'foo.git\n')
def setUp(self):
    """Point the override test at a fresh temporary sqlite database."""
    db_file = "/tmp/fuglu_override_test.db"
    if os.path.exists(db_file):
        os.remove(db_file)
    self.testfile = db_file
    # important: 4 slashes for absolute paths!
    self.testdb = "sqlite:///%s" % db_file
    conf = RawConfigParser()
    conf.add_section('databaseconfig')
    conf.set('databaseconfig', 'dbconnectstring', self.testdb)
    conf.set(
        'databaseconfig',
        "sql",
        "SELECT value FROM fugluconfig WHERE section=:section AND option=:option AND scope IN ('$GLOBAL','%'||:to_domain,:to_address) ORDER BY SCOPE DESC")
    self.config = conf
    self.create_database()
def generate_token_file(self, token_file, oauth_token, oauth_token_secret): self.oauth_token = oauth_token self.oauth_token_secret = oauth_token_secret conf = RawConfigParser() conf.add_section(SECTION_TOKEN) conf.set(SECTION_TOKEN, 'oauth_token', oauth_token) conf.set(SECTION_TOKEN, 'oauth_token_secret', oauth_token_secret) with open(token_file, 'wb') as tokens: conf.write(tokens) print encode(_('your account has been saved'))
def set_serial_from_pkcs12(self): """A CA cert was loaded from a PKCS#12 file. Set up our serial file""" cur_serial = self.find_cacert_serial() try: fp = open(CA_SERIALNO, "w") parser = RawConfigParser() parser.add_section('selfsign') parser.set('selfsign', 'lastvalue', cur_serial) parser.set('selfsign', 'nextreplica', 500000) parser.set('selfsign', 'replicainterval', 500000) parser.write(fp) fp.close() except IOError, e: raise RuntimeError("Unable to increment serial number: %s" % str(e))
def setUp(self):
    """Build a temporary attachment-rules database, template dir and plugin."""
    db_file = "/tmp/attachconfig.db"
    if os.path.exists(db_file):
        os.remove(db_file)
    # important: 4 slashes for absolute paths!
    testdb = "sqlite:///%s" % db_file
    sql = """create table attachmentrules(
        id integer not null primary key,
        scope varchar(255) not null,
        checktype varchar(20) not null,
        action varchar(255) not null,
        regex varchar(255) not null,
        description varchar(255) not null,
        prio integer not null
    )
    """
    self.session = fuglu.extensions.sql.get_session(testdb)
    self.session.flush()
    self.session.execute(sql)
    self.tempdir = tempfile.mkdtemp('attachtestdb', 'fuglu')
    self.template = '%s/blockedfile.tmpl' % self.tempdir
    shutil.copy(CONFDIR + '/templates/blockedfile.tmpl.dist', self.template)
    for rulefile in ('default-filenames.conf', 'default-filetypes.conf'):
        shutil.copy(CONFDIR + '/rules/%s.dist' % rulefile,
                    '%s/%s' % (self.tempdir, rulefile))
    conf = RawConfigParser()
    conf.add_section('FiletypePlugin')
    for option, value in (
            ('template_blockedfile', self.template),
            ('rulesdir', self.tempdir),
            ('dbconnectstring', testdb),
            ('blockaction', 'DELETE'),
            ('sendbounce', 'True'),
            ('query',
             'SELECT action,regex,description FROM attachmentrules WHERE scope=:scope AND checktype=:checktype ORDER BY prio'),
            ('checkarchivenames', 'False'),
            ('checkarchivecontent', 'False'),
            ('archivecontentmaxsize', '500000')):
        conf.set('FiletypePlugin', option, value)
    conf.add_section('main')
    conf.set('main', 'disablebounces', '1')
    self.candidate = FiletypePlugin(conf)
class SupervisorConfig(object):
    """Assembles a supervisord config from a template and environment flags."""

    def __init__(self, infile_path=INFILE_PATH, outfile_path=OUTFILE_PATH):
        self.infile_path = infile_path
        self.outfile_path = outfile_path
        self.config = RawConfigParser()
        self.config.read(self.infile_path)

    def add_program(self, service_name, extra_args=None):
        """Add one [program:...] section for the given service."""
        args = list(extra_args) if extra_args is not None else []
        section = 'program:{}'.format(service_name)
        if args:
            # Distinguish multiple instances of the same service by their args
            section = '{}_{}'.format(section, '-'.join(args))
        self.config.add_section(section)
        command = ' '.join(PROGRAM_COMMANDS[service_name] + args)
        self.config.set(section, 'command', command)
        parameters = PROGRAM_PARAMETERS.get(service_name, DEFAULT_PARAMETERS)
        for key, value in parameters.iteritems():
            self.config.set(section, key, value)
        self.config.set(section, 'stdout_logfile', LOG_PATH.format(service_name))

    def update(self):
        """Register every enabled service as a supervisord program."""
        # PNA services (one monitor per interface and one pusher)
        if getenv('OBSRVBL_PNA_SERVICE', 'false') == 'true':
            for iface in getenv('OBSRVBL_PNA_IFACES', '').split():
                self.add_program('ona-pna-monitor', extra_args=[iface])
            self.add_program('ona-pna-pusher')
        # All other services
        for flag, program_list in ENABLE_FLAGS:
            if getenv(flag, 'false') == 'true':
                for program in program_list:
                    self.add_program(program)

    def write(self):
        """Serialize the assembled configuration to the output path."""
        with io.open(self.outfile_path, 'wb') as outfile:
            self.config.write(outfile)
def add_backend(self, backend_name, module_name, params, edit=False):
    """Create (or, with edit=True, rewrite) a backend section in the config."""
    if not backend_name:
        raise ValueError(u'Please give a name to the configured backend.')
    parser = RawConfigParser()
    parser.read(self.confpath)
    if not edit:
        try:
            parser.add_section(backend_name)
        except DuplicateSectionError:
            raise BackendAlreadyExists(backend_name)
    parser.set(backend_name, '_module', module_name)
    for key, value in params.iteritems():
        # py2: config files are written as bytes, so encode unicode values
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        parser.set(backend_name, key, value)
    with open(self.confpath, 'wb') as stream:
        parser.write(stream)
def ignore(self, target, repo):
    """Wire the dependencies ignore file into hgrc and record the module in it."""
    config_path = os.path.join(repo, '.hg', 'hgrc')
    ignore_file = os.path.join('.hg', 'dependencies')
    parser = RawConfigParser()
    parser.read(config_path)
    if not parser.has_section('ui'):
        parser.add_section('ui')
    parser.set('ui', 'ignore.dependencies', ignore_file)
    with open(config_path, 'w') as stream:
        parser.write(stream)
    module = os.path.relpath(target, repo)
    _ensure_line_exists(os.path.join(repo, ignore_file), module)
def write_config(args, config_file=None):
    """
    Write a topicexplorer .ini configuration file from parsed CLI args.

    If config_file is None, then a name is automatically generated from
    args.corpus_name, and the user is prompted interactively before an
    existing file is overwritten. Returns the path of the written file.
    """
    config = ConfigParser()
    config.add_section("main")
    config.set("main", "path", os.path.abspath(args.model_path))
    config.set("main", "corpus_file", os.path.abspath(args.corpus_filename))
    config.set("main", "raw_corpus", os.path.abspath(args.corpus_path))
    config.set("main", "sentences", args.sentences)
    if args.bibtex:
        # BibTeX corpora use a dedicated label module plus their own section
        config.set("main", "label_module", "topicexplorer.extensions.bibtex")
        config.add_section("bibtex")
        config.set("bibtex", "path", args.bibtex)
    config.add_section("www")
    config.set("www", "corpus_name", args.corpus_print_name)
    config.set("www", "icons", "link")
    config.set("www", "fulltext", "false")
    config.add_section("logging")
    config.set("logging", "path", "logs/%s/{0}.log" % args.corpus_name)
    if args.htrc:
        # HTRC corpora override the label module and, when no print name was
        # given, the www display defaults as well
        config.set("main", "label_module", "topicexplorer.extensions.htrc")
        if not args.corpus_print_name:
            config.set("www", "corpus_name", "HTRC Data Capsule")
            config.set("www", "doc_title_format", '<a href="{1}">{0}</a>')
            config.set("www", "doc_url_format", 'http://hdl.handle.net/2027/{0}')
            config.set("www", "icons", "htrc,htrcbook,link")
        config.set("main", "htrc", True)

    if config_file is None:
        config_file = args.corpus_name + ".ini"
        # overwrite is None while the file exists and the user has not yet
        # confirmed; it becomes truthy once it is safe to write
        overwrite = None if os.path.exists(config_file) else True
        while not overwrite:
            overwrite = raw_input(
                "\nConfig file {0} exists. Overwrite? [Y/n] ".format(
                    config_file))
            overwrite = overwrite.lower().strip()
            if overwrite == 'n':
                # Suggest the next free numbered filename, but let the user
                # type their own ('n' is truthy, so the loop then exits)
                config_i = 0
                while os.path.exists(config_file):
                    config_file = args.corpus_name + ".%d.ini" % config_i
                    config_i += 1
                config_file = raw_input("Enter new filename [default: {0}]: ".format(config_file))\
                    or config_file
            elif overwrite == '' or overwrite == 'y':
                overwrite = True

    print "Writing configuration file", config_file
    with open(config_file, "wb") as configfh:
        config.write(configfh)
    return config_file
def convert_download_checkpoints(self):
    """
    Convert all pickle download checkpoints to .state files.

    Each legacy *.pickle checkpoint in the pstate directory is loaded,
    migrated into an INI-style RawConfigParser with 'downloadconfig' and
    'state' sections, written back as a *.state file, and then removed.
    Corrupted pickles are deleted and skipped.
    """
    checkpoint_dir = self.session.get_downloads_pstate_dir()
    filelist = os.listdir(checkpoint_dir)
    # Fast exit when there are no legacy pickle checkpoints at all
    if not any([filename.endswith('.pickle') for filename in filelist]):
        return

    if os.path.exists(checkpoint_dir):
        for old_filename in glob.glob(
                os.path.join(checkpoint_dir, '*.pickle')):
            old_checkpoint = None
            try:
                with open(old_filename, "rb") as old_file:
                    old_checkpoint = pickle.load(old_file)
            except (EOFError, KeyError):
                # Pickle file appears to be corrupted, remove it and continue
                os.remove(old_filename)
                continue

            new_checkpoint = RawConfigParser()
            new_checkpoint.add_section('downloadconfig')
            new_checkpoint.add_section('state')
            # Only a whitelisted subset of the old download config is migrated
            for key, value in old_checkpoint['dlconfig'].iteritems():
                if key in [
                        'saveas', 'max_upload_rate', 'max_download_rate',
                        'super_seeder', 'mode', 'selected_files',
                        'correctedfilename'
                ]:
                    new_checkpoint.set('downloadconfig', key, value)
            new_checkpoint.set('state', 'version',
                               PERSISTENTSTATE_CURRENTVERSION)
            new_checkpoint.set('state', 'engineresumedata',
                               old_checkpoint['engineresumedata'])
            new_checkpoint.set('state', 'dlstate', old_checkpoint['dlstate'])
            new_checkpoint.set('state', 'metainfo', old_checkpoint['metainfo'])
            # Write the .state replacement, then delete the legacy pickle
            with open(old_filename.replace('.pickle', '.state'),
                      "wb") as new_file:
                new_checkpoint.write(new_file)
            os.remove(old_filename)
def create_gc3pie_config_snippet(cluster):
    """
    Create a configuration file snippet to be used with GC3Pie.

    Builds an `auth/elasticluster_<name>` and a `resource/elasticluster_<name>`
    section from the cluster's frontend node and returns the rendered INI
    text as a string.
    """
    auth_section = 'auth/elasticluster_%s' % cluster.name
    resource_section = 'resource/elasticluster_%s' % cluster.name
    cfg = RawConfigParser()

    cfg.add_section(auth_section)
    frontend_node = cluster.get_ssh_to_node()
    cfg.set(auth_section, 'type', 'ssh')
    cfg.set(auth_section, 'username', frontend_node.image_user)

    cluster_info = inspect_node(frontend_node)
    cfg.add_section(resource_section)
    cfg.set(resource_section, 'enabled', 'yes')
    cfg.set(resource_section, 'transport', 'ssh')
    cfg.set(resource_section, 'frontend', frontend_node.preferred_ip)
    if not cluster_info:
        # Inspection failed: fall back to placeholder values so the snippet
        # is still syntactically complete.
        log.error("Unable to gather enough information from the cluster. "
                  "The following information is only partial!")
        cluster_info = {
            'architecture': 'unknown',
            'type': 'unknown',
            'max_cores': -1,
            'max_cores_per_job': -1,
            'max_memory_per_core': -1,
            'max_walltime': '672hours'
        }
    cfg.set(resource_section, 'type', cluster_info['type'])
    cfg.set(resource_section, 'architecture', cluster_info['architecture'])
    cfg.set(resource_section, 'max_cores', cluster_info.get('max_cores', 1))
    cfg.set(resource_section, 'max_cores_per_job',
            cluster_info.get('max_cores_per_job', 1))
    cfg.set(resource_section, 'max_memory_per_core',
            cluster_info.get('max_memory_per_core', '2GB'))
    cfg.set(resource_section, 'max_walltime',
            cluster_info.get('max_walltime', '672hours'))

    cfgstring = StringIO()
    cfg.write(cfgstring)
    return cfgstring.getvalue()
def add_project(project_name, specfile):
    """Add a project to the global configuration file.

    :param project_name: Name of the project
    :param specfile: path to the data dictionary used by the project.
    :type project_name: str
    :type specfile: str
    :return: None
    """
    # Relative schema paths would break once the cwd changes
    if not op.isabs(specfile):
        raise ValueError("Path to the schema should be absolute.")
    config_path = locate_config_file()
    config = RawConfigParser()
    config.read(config_path)
    config.add_section(project_name)
    config.set(project_name, "specfile", specfile)
    with open(config_path, "w") as stream:
        config.write(stream)
def test_invalid_rule(self): """ Makes sure the parser raises an error with invalid rules """ config = RawConfigParser() # Create sections in the config file for rule_name, options, _ in INVALID_RULES: config.add_section(rule_name) for option_name, val in options.items(): config.set(rule_name, option_name, val) # Test invalid sections that should fail to parse for rule_name, _, exception in INVALID_RULES: try: load_rule(config, PLUGINS_MOCK, rule_name) self.fail() except RulesParseError, ex: if str(ex).find(exception) < 0: self.fail()
def save_ui_config(defaults, section, save_options, error_callback): p = RawConfigParser() filename = os.path.join(defaults['data_dir'], 'ui_config') p.read(filename) p.remove_section(section) p.add_section(section) for name in save_options: p.set(section, name, defaults[name]) try: f = file(filename, 'w') p.write(f) f.close() except Exception, e: try: f.close() except: pass error_callback(ERROR, 'Could not permanently save options: ' + str(e))
def fixRemoteRepo(tree, repo):
    """Fix up the default remote repo URL for an unbundled repo
    @param tree {str} The name of the tree, e.g. "mozilla-release"
    @param repo {str} The path to the repo, e.g. "/temp/mozilla/src"
    """
    # The original fetched the URL and then ignored it, calling
    # getRepoFromTree() a second time; fetch once and reuse.
    url = getRepoFromTree(tree)
    filename = os.path.join(repo, ".hg", "hgrc")
    from ConfigParser import RawConfigParser
    config = RawConfigParser()
    config.read([filename])
    if not config.has_section("paths"):
        config.add_section("paths")
    config.set("paths", "default", url)
    # with-statement replaces the manual try/finally close
    with open(filename, "w") as f:
        config.write(f)