def main():
    """Generate supervisord.conf from its .tmpl template using settings from
    the config file named on the command line, then ensure the var/, var/log/
    and var/run/ directories exist.

    Exits with status 1 (after printing usage) when no config URI is given.
    """
    if len(sys.argv) < 2:
        sys.stderr.write('Usage: %s CONFIG_URI\n' % sys.argv[0])
        sys.exit(1)
    config_uri = sys.argv.pop(1)
    config = ConfigParser()
    config.read(config_uri)
    # Values substituted into the %-style placeholders of the template.
    # (renamed from 'vars', which shadowed the builtin)
    template_vars = {
        'CELERY_BROKER': config.get('app:main', 'celery.broker'),
        'here': dirname(abspath('supervisord.conf')),
        'CONFIG_FILE': config_uri,
    }
    for fname in ('supervisord.conf',):
        # 'with' guarantees the handles are closed (the original leaked both).
        with open(fname + '.tmpl') as tmplfile:
            tmpl = tmplfile.read()
        with open(fname, 'w') as inifile:
            inifile.write(tmpl % template_vars)
    # Create the runtime directory tree expected by supervisord.
    for path in ('var', 'var/log', 'var/run'):
        if not exists(path):
            mkdir(path)
def readFrom(self, path, section):
    """Merge one section of an ini file into this dict-like config object.

    Values are coerced based on the type of the matching entry in the
    module-level ``defaults`` dict: lists are split on commas, booleans go
    through getboolean(), and everything else is tried as int, then float,
    then kept as a string.

    :param path: ini file to read.
    :param section: section name to merge; silently skipped if absent.
    :raises CarbonConfigException: if the file cannot be read at all.
    """
    parser = ConfigParser()
    if not parser.read(path):
        raise CarbonConfigException("Failed to read config file %s" % path)
    if not parser.has_section(section):
        return
    for key, value in parser.items(section):
        key = key.upper()
        # Detect the target type from the defaults dict.
        if key in defaults:
            valueType = type(defaults[key])
        else:
            valueType = str
        if valueType is list:
            value = [v.strip() for v in value.split(',')]
        elif valueType is bool:
            value = parser.getboolean(section, key)
        else:
            # Attempt numeric coercion, falling back to the raw string.
            # Narrowed from bare 'except:', which also swallowed
            # KeyboardInterrupt/SystemExit.
            try:
                value = int(value)
            except ValueError:
                try:
                    value = float(value)
                except ValueError:
                    pass
        self[key] = value
class ConfigLoader():
    """Load the [system] section of the program's config file and validate
    the storage directory and listening port before startup.

    NOTE(review): ``self.read`` is used below but not defined here — it is
    expected to be provided elsewhere (not visible in this chunk).
    """

    def __init__(self):
        # Abort early if the config file cannot even be opened.
        # (narrowed from a bare 'except:')
        try:
            open(cfg_name, 'r').close()
        except Exception:
            sys.stderr.write(u'严重错误,无法读取配置文件!程序自动退出。\n')
            exit(-1)
        self.config = ConfigParser()
        self.config.read(cfg_name)
        self.system = dict(self.config.items('system'))
        self.config_check()

    def config_check(self):
        """Validate the configured paths and port, exiting on any failure."""
        try:
            # First verify the configured global storage directory is valid.
            check_path(self.read('global_pos'))
            # Then make sure the administrator's download directory exists.
            root_path = os.path.join(self.read('global_pos'), 'root')
            if not os.path.exists(root_path):
                os.mkdir(root_path)
        except Exception as err:  # was py2-only 'except Exception, err'
            sys.stderr.write(u'系统错误,原因:%s\n' % err)
            exit(-1)
        # Finally check whether the configured port is already taken.
        if check_port(self.read('port_name')):
            sys.stderr.write(u'系统错误,端口被占用!\n')
            exit(-1)
def sync():
    """Copy the relevant properties from galaxy.ini into reports.ini,
    replacing values on existing lines and appending any that are missing."""
    reports_config_file = argv[1] if len(argv) > 1 else "config/reports.ini"
    universe_config_file = argv[2] if len(argv) > 2 else "config/galaxy.ini"
    parser = ConfigParser()
    parser.read(universe_config_file)
    with open(reports_config_file, "r") as f:
        reports_config_lines = f.readlines()
    replaced_properties = set()
    with open(reports_config_file, "w") as f:
        # Rewrite every existing line, substituting synced values as needed.
        for original_line in reports_config_lines:
            (line, replaced_property) = get_synced_line(original_line, parser)
            if replaced_property:
                replaced_properties.add(replaced_property)
            f.write(line)
        # Append properties that appear in the universe config but were not
        # present in the reports config.
        for replacement_property in REPLACE_PROPERTIES:
            if parser.has_option(MAIN_SECTION, replacement_property) and \
                    replacement_property not in replaced_properties:
                f.write(get_universe_line(replacement_property, parser))
def read_last_synced(self, _job, _smb_connection=None):
    """
    Read the date and time from the last_synced property of the destination
    status file ('ofs_status.txt' in the job's destination folder).

    :param _job: A SyncJob instance
    :param _smb_connection: An SMB connection.
    :return: datetime of the last sync, or None when the status file is
        missing or unreadable.
    """
    _cfg = ConfigParser()
    _file_obj = None
    # Remote host: fetch the status file over SMB; otherwise read from the
    # local filesystem.
    if _smb_connection is not None:
        _file_obj = read_string_file_smb(
            _smb_connection,
            os.path.join(_job.destination_folder, 'ofs_status.txt'))
    else:
        try:
            _file_obj = open(
                os.path.join(_job.destination_folder, 'ofs_status.txt'), "r")
        except IOError:
            _file_obj = None
    if _file_obj:
        # read_file() replaces readfp(), which was deprecated and removed
        # in Python 3.12.
        _cfg.read_file(_file_obj)
        _file_obj.close()
        _result = _cfg.get("history", "last_synced")
        if _result is not None:
            return datetime.datetime.strptime(_result, "%Y-%m-%d %H:%M:%S.%f")
    return None
def _load_object_post_as_copy_conf(self, conf): if ('object_post_as_copy' in conf or '__file__' not in conf): # Option is explicitly set in middleware conf. In that case, # we assume operator knows what he's doing. # This takes preference over the one set in proxy app return cp = ConfigParser() if os.path.isdir(conf['__file__']): read_conf_dir(cp, conf['__file__']) else: cp.read(conf['__file__']) try: pipe = cp.get("pipeline:main", "pipeline") except (NoSectionError, NoOptionError): return proxy_name = pipe.rsplit(None, 1)[-1] proxy_section = "app:" + proxy_name try: conf['object_post_as_copy'] = cp.get(proxy_section, 'object_post_as_copy') except (NoSectionError, NoOptionError): pass
def __init__(self):
    """Wire up URL routes, the database connection, templates, active-build
    tracking and the weighted download-mirror pool."""
    handlers = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/rss", RssHandler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
    ]
    settings = dict(debug=options.debug)
    super(Application, self).__init__(handlers, **settings)
    config = ConfigParser()
    # 'with' fixes the leaked file handle of the original open() call.
    with open(options.config) as config_file:
        config.readfp(config_file)
    # One global connection
    init_database(create_engine(config.get('database', 'uri')))
    self.db = DBSession
    template_path = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[template_path])
    self.activebuilds = ActiveBuilds()
    #self.stats = Stats()
    # Mirrors are picked proportionally to their weight.
    self.mirrorpool = WeightedChoice((
        ('http://oss.reflected.net/%s', 1000),
        ('http://mirror.sea.tdrevolution.net/%s', 500),
        ('http://cm.sponsored.cb-webhosting.de/%s', 50),
        ('http://mirror.netcologne.de/cyanogenmod/%s', 75),
    ))
def test_import_local(self):
    """Importing a local repo updates both the ini and the yaml configs."""
    self.tool.run(
        config=(self.yaml_config, self.config),
        verbose=True,
        clobber=False,
        repo_dir=self.working_dir,
        repo_url=self.workspace.working_dir,
        ini_config=self.ini_config,
        ini_section='app:main',
        update_config=True,
        repo_name=None,
        repo_host=None)
    parser = ConfigParser()
    parser.read(self.ini_config)
    self.assertEqual(
        parser.get('app:main', 'unicore.content_repo_urls').strip(),
        os.path.basename(self.workspace.working_dir))
    with open(self.yaml_config, 'r') as fp:
        yaml_data = yaml.safe_load(fp)
    repo_name = parse_repo_name(self.workspace.working_dir)
    self.assertEqual(yaml_data['repositories'],
                     {repo_name: self.workspace.working_dir})
def test_import_remote(self):
    """Importing via a remote repo host records the hosted repo URL."""
    repo_name = parse_repo_name(self.workspace.working_dir)
    repo_location = 'http://localhost:8080/repos/%s.json' % repo_name
    responses.add_callback(
        responses.POST, 'http://localhost:8080/repos.json',
        callback=lambda _: (301, {'Location': repo_location}, ''))
    self.tool.run(
        config=(self.yaml_config, self.config),
        verbose=True,
        clobber=False,
        repo_dir=self.working_dir,
        repo_url=self.workspace.working_dir,
        ini_config=self.ini_config,
        ini_section='app:main',
        update_config=True,
        repo_name=None,
        repo_host='http://localhost:8080')
    parser = ConfigParser()
    parser.read(self.ini_config)
    self.assertEqual(
        parser.get('app:main', 'unicore.content_repo_urls').strip(),
        repo_location)
    with open(self.yaml_config, 'r') as fp:
        yaml_data = yaml.safe_load(fp)
    self.assertEqual(yaml_data['repositories'],
                     {repo_name: self.workspace.working_dir})
def main(numthreads=10):
    """Queue the user's VK audio items and download them with
    *numthreads* worker threads, printing the elapsed time."""
    started = time.time()
    queue = Queue()
    factory = TokenFactory()
    config = ConfigParser()
    config.read('vk_api.conf')
    url = API.get_url(
        app_id=config.get('api', 'id'),
        app_key=config.get('api', 'key'),
        permissions=PERMISSIONS,
        redirect_uri=URI,
        display=DISPLAY,
        api_version=VERSION)
    # TODO: check token expiration
    token_pair = factory.get_token_pair()
    if not token_pair:
        token_pair = factory.store_token_pair(url)
    api = API(token=token_pair[0], user_id=token_pair[1])
    audio = api.audio
    # NOTE(review): 'audio.get' is accessed without calling it — presumably
    # a property/descriptor that performs the request; confirm upstream.
    data = audio.get
    if data:
        for item in data['response']['items']:
            queue.put(item)
    for _ in range(numthreads):
        DownloadThread(queue, FILE_DIR).start()
    queue.join()
    print('Time: {0}'.format(time.time() - started))
def __init__(self):
    """Wire up URL routes, the database connection, templates and the
    download-mirror pool."""
    handlers = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler,
         {"path": os.path.join(os.path.dirname(__file__), "static")}),
    ]
    settings = dict(debug=options.debug)
    super(Application, self).__init__(handlers, **settings)
    config = ConfigParser()
    # 'with' fixes the leaked file handle of the original open() call.
    with open(options.config) as config_file:
        config.readfp(config_file)
    # One global connection
    init_database(create_engine(config.get('database', 'uri')))
    self.db = DBSession
    template_path = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[template_path],
                                 filesystem_checks=False)
    self.mirrorpool = WeightedChoice((
        ('http://mirror.galliumos.org/%s', 1000),
    ))
def __init__(self, config_path):
    """Build the service configuration: start from the per-service defaults,
    then apply overrides read from *config_path*.

    Boolean-ish strings are mapped to Python bool/None values, a few known
    numeric options are converted to int, and unknown sections or parameters
    are logged and ignored.
    """
    self.CONFIG['globals'] = dict(SERVICE_GLOBAL_DEFAULT)
    self.CONFIG['tcprpc'] = dict(SERVICE_TCPRPC_DEFAULT)
    self.CONFIG['ssltcprpc'] = dict(SERVICE_SSLTCPRPC_DEFAULT)
    self.CONFIG['soaprpc'] = dict(SERVICE_SOAPRPC_DEFAULT)
    self.CONFIG['sslsoaprpc'] = dict(SERVICE_SSLSOAPRPC_DEFAULT)
    self.CONFIG['udprpc'] = dict(SERVICE_UDPRPC_DEFAULT)
    self.CONFIG['unixrpc'] = dict(SERVICE_UNIXRPC_DEFAULT)
    self.CONFIG['unixudprpc'] = dict(SERVICE_UNIXUDPRPC_DEFAULT)
    self.CONFIG['tcpv6rpc'] = dict(SERVICE_TCPV6RPC_DEFAULT)
    self.CONFIG['ssltcpv6rpc'] = dict(SERVICE_SSLTCPV6RPC_DEFAULT)
    self.CONFIG['udpv6rpc'] = dict(SERVICE_UDPV6RPC_DEFAULT)
    cf = ConfigParser()
    cf.read(config_path)
    # Map common textual flags onto Python values.
    l = {'true': True, 'false': False, 'none': None,
         'y': True, 'yes': True,
         'n': False, 'no': False,
         'enable': True, 'disable': False}
    for s in cf.sections():
        key = s.lower()
        if key not in self.CONFIG:
            _logger.info('Invalid section: - %s - Ignored' % (s,))
            continue
        for (n, v) in cf.items(s):
            if v.lower() in l:
                v = l[v.lower()]
            elif n.lower() in ('port', 'timeout', 'poll_interval',
                               'max_children'):
                v = int(v)
            # Index with the lowercased section name; the original used
            # self.CONFIG[s] and raised KeyError for mixed-case sections.
            if n.lower() in self.CONFIG[key]:
                if type(v) == str and len(v) > 0:
                    if self.CONFIG[key][n] != v.lower():
                        self.CONFIG[key][n] = v.lower()
                # was (bool, int, None): None is a value, not a type, so the
                # intended NoneType branch could never match.
                elif type(v) in (bool, int, type(None)):
                    if self.CONFIG[key][n] != v and v:
                        self.CONFIG[key][n] = v
            else:
                _logger.info(
                    'Invalid parameter: - %s section: - %s - Ignored' % (n, s))
def test_umit_conf_content(filename):
    """The umit config at *filename* must define nmap_command_path in the
    [paths] section."""
    parser = ConfigParser()
    parser.read(filename)
    # Paths section
    assert get_or_false(parser, "paths", "nmap_command_path")
def install_mercurial_hook():
    """
    Installs the mercurial precommit hook by adding a hook to the hgrc
    file in the .hg directory of the repository.
    """
    repo_dir = get_repo_dir()
    config_file = os.path.join(repo_dir, '.hg', 'hgrc')
    parser = ConfigParser()
    parser.read(config_file)
    hook_script = os.path.join(repo_dir, 'scripts', 'codestyleprecommit.py')
    # Register the precommit hook, creating the [hooks] section if needed.
    if not parser.has_section('hooks'):
        parser.add_section('hooks')
    parser.set('hooks', 'pretxncommit.precommit',
               'python:%s:mercurial_hook' % hook_script)
    with open(config_file, 'w') as config:
        parser.write(config)
def startService(self):
    """Start the scheduler and kick off a tree build for every tree listed
    in the l10n ini file, remembering a DeferredList of their completions."""
    BaseUpstreamScheduler.startService(self)
    log.msg("starting l10n scheduler")
    if self.inipath is None:
        # testing, don't trigger tree builds
        return
    # Trigger tree builds for our trees, clear() first.
    cp = ConfigParser()
    cp.read(self.inipath)
    self.trees.clear()
    deferreds = []
    for tree in cp.sections():
        # Create a BuildSet and submit it to the BuildMaster.
        props = properties.Properties()
        props.update({'tree': tree, 'l10nbuilds': self.inipath}, "Scheduler")
        bs = buildset.BuildSet([self.treebuilder], SourceStamp(),
                               properties=props)
        self.submitBuildSet(bs)
        deferreds.append(bs.waitUntilFinished())
    d = defer.DeferredList(deferreds)
    d.addCallback(self.onTreesBuilt)
    self.waitOnTree = d
def restore_rois(self, roifile):
    """Restore ROI settings from a ROI.dat file and propagate them to all
    MCAs, calibrating the rest against the first MCA."""
    cp = ConfigParser()
    cp.read(roifile)
    rois = []
    self.mcas[0].clear_rois()
    prefix = self.mcas[0]._prefix
    if prefix.endswith('.'):
        prefix = prefix[:-1]
    iroi = 0
    for opt in cp.options('rois'):
        if opt.lower().startswith('roi'):
            # Each entry looks like "name | lo hi".
            name, dat = cp.get('rois', opt).split('|')
            lims = [int(i) for i in dat.split()]
            roi = ROI(prefix=prefix, roi=iroi)
            roi.left = lims[0]
            roi.right = lims[1]
            roi.name = name.strip()
            rois.append(roi)
            iroi += 1
            # Give EPICS channel-access callbacks a chance to run.
            epics.poll(0.050, 1.0)
    self.mcas[0].set_rois(rois)
    cal0 = self.mcas[0].get_calib()
    for mca in self.mcas[1:]:
        mca.set_rois(rois, calib=cal0)
def readConfigFile(self, config_file_name=None):
    """Reads the given config file, or if none is given, the default
    config file.

    :param config_file_name: a String specifying the name of the config
       file to read.
    """
    # Create ConfigParser and UDConfig objects.
    confparser = ConfigParser()
    self.opts = YumCronConfig()
    # If no config file name is given, fall back to the default.
    if config_file_name is None:
        config_file_name = default_config_file
    # confparser.read returns the list of files read successfully, so
    # check that it contains config_file_name.
    if config_file_name not in confparser.read(config_file_name):
        # Portable replacement for the py2-only 'print >> sys.stderr'.
        sys.stderr.write("Error reading config file: %s\n" % config_file_name)
        sys.exit(1)
    # Populate the values into the opts object.
    self.opts.populate(confparser, 'commands')
    self.opts.populate(confparser, 'emitters')
    self.opts.populate(confparser, 'email')
    self.opts.populate(confparser, 'groups')
    self._confparser = confparser
    # If the system name is not given, set it from the hostname.
    if self.opts.system_name == 'None':
        self.opts.system_name = gethostname()
    if 'None' in self.opts.group_list:
        self.opts.group_list = []
def _load_configuration(environment, path):
    """Loads a given configuration file specified by path and environment
    header (ini file) and returns a dict of its options.

    Values enclosed in {} are decrypted using the $FURTHER_PASSWORD
    variable; values equal to [RND] are replaced with a random string."""
    # Read configuration file.
    parser = ConfigParser()
    parser.read(path)
    config = {}
    for option in parser.options(environment):
        value = parser.get(environment, option)
        # Handle encrypted configuration ("{ciphertext}").
        encrypted = re.match(r'^\{(.*)\}$', value)
        if encrypted:
            value = (local('decrypt.sh input="' + encrypted.group(1) +
                           '" password=$FURTHER_PASSWORD algorithm="PBEWithSHA1AndDESede" verbose="false"',
                           capture=True))
        # Handle random values.
        if re.match(r'\[RND\]', value):
            value = _random_string()
        config[option] = value
    return config
def get_mapping_fields(self, cr, uid, shop_id, context=None, reverse=False):
    """Return the type_mapping list configured for this model in the shop's
    prestashop .conf file, or (result, False) when it is unavailable."""
    result = {}
    if not context:
        context = {}
    shop_pool = self.pool.get('sale.shop')
    shop_data = shop_pool.read(cr, uid, shop_id, ['prestashop_config_path'])
    config_path = shop_data['prestashop_config_path']
    if not config_path or not config_path.endswith(".conf") \
            or not self._prestashop:
        return result, False
    config = ConfigParser()
    config.read(config_path)
    if self._name not in config.sections():
        return result, False
    mapping = dict(config.items(self._name))
    # SECURITY: eval() on config-file content — acceptable only for
    # trusted-admin files; consider ast.literal_eval instead.
    return eval(mapping.get('type_mapping', "[]"))
def get_config_parameter(jobname, parameter_name, is_parameter_collection=False):
    """Detect export method of JOBNAME.  Basically, parse JOBNAME.cfg
    and return export_method.  Return None if problem found."""
    jobconffile = CFG_ETCDIR + os.sep + 'bibexport' + os.sep + jobname + '.cfg'
    if not os.path.exists(jobconffile):
        write_message("ERROR: cannot find config file %s." % jobconffile)
        return None
    jobconfig = ConfigParser()
    jobconfig.read(jobconffile)
    if is_parameter_collection:
        # Collect every option of [export_job] whose name starts with
        # parameter_name.
        return [item_value
                for item_name, item_value in jobconfig.items('export_job')
                if item_name.startswith(parameter_name)]
    return jobconfig.get('export_job', parameter_name)
class Config(object): def __init__(self): self.parser = ConfigParser() self.read_configuration() def read_configuration(self): # check if we are in svn working dir if not os.path.isdir('.svn'): raise ExsvnError("Current directory is not a svn working directory") fullcfgname = os.path.join('.svn', CONFIG_FILENAME) if not os.path.exists(fullcfgname): self.create_configuration(fullcfgname) self.parser.read(fullcfgname) def create_configuration(self, fname): """Create new configuration file""" print "Creating default configuration in %s" % fname cfg = self.get_default_configuration() f = file(fname, "w") f.write(cfg) f.close() # protect from others os.chmod(fname, 0600) def get_default_configuration(self): return """
def config():
    """Read settings.ini, set the module-level format/language globals and
    return the user-facing settings as a list."""
    global video_format, resolution, lang, lang2, forceusa, localizecookies
    configr = ConfigParser()
    configr.read('settings.ini')
    quality = configr.get('SETTINGS', 'video_quality')
    # quality -> [video_format, resolution]
    qualities = {'android': ['107', '71'],
                 '360p': ['106', '60'],
                 '480p': ['106', '61'],
                 '720p': ['106', '62'],
                 '1080p': ['108', '80'],
                 'highest': ['0', '0']}
    video_format, resolution = qualities[quality]
    lang = configr.get('SETTINGS', 'language')
    lang2 = configr.get('SETTINGS', 'language2')
    # Map the ASCII config spellings onto display names.
    langd = {'Espanol_Espana': u'Español (Espana)',
             'Francais': u'Français (France)',
             'Portugues': u'Português (Brasil)',
             'English': u'English',
             'Espanol': u'Español',
             'Turkce': u'Türkçe',
             'Italiano': u'Italiano',
             'Arabic': u'العربية',
             'Deutsch': u'Deutsch'}
    lang = langd[lang]
    lang2 = langd[lang2]
    forcesub = configr.getboolean('SETTINGS', 'forcesubtitle')
    forceusa = configr.getboolean('SETTINGS', 'forceusa')
    localizecookies = configr.getboolean('SETTINGS', 'localizecookies')
    onlymainsub = configr.getboolean('SETTINGS', 'onlymainsub')
    return [lang, lang2, forcesub, forceusa, localizecookies, quality,
            onlymainsub]
class Config(object):
    """Singleton wrapper around an ini file, exposing named sections as
    attributes (config.irc, config.db, ...)."""

    def __new__(type, *args, **kwargs):
        # Classic singleton: reuse the one stored instance.
        if '_the_instance' not in type.__dict__:
            type._the_instance = object.__new__(type)
        return type._the_instance

    def __init__(self, filename=None):
        # A new filename re-targets the singleton; otherwise keep the old one.
        if filename is not None:
            self.filename = filename
        self.config = ConfigParser()
        self.config.read(self.filename)

    def get_section(self, name):
        """Return a _Section for *name* (empty when the section is absent)."""
        items = self.config.items(name) if self.config.has_section(name) else []
        return _Section(name, items, self)

    def __getattr__(self, attr):
        # Map friendly attribute names onto ini section headers.
        sections = {'irc': 'IRC', 'ldap': 'LDAP', 'rpc': 'RPC',
                    'bot': 'Bot', 'smtp': 'SMTP', 'db': 'Database',
                    'identica': 'Identi.ca'}
        if attr in sections:
            return self.get_section(sections[attr])
        raise AttributeError('No section \'%s\' in Config.' % attr)
def __init__(self, config_path=CONFIG_PATH):
    """Load the config at *config_path*, creating a default file when it
    cannot be read.

    :param config_path: location of the ini file (defaults to CONFIG_PATH).
    """
    ConfigParser.__init__(self, allow_no_value=True)
    self.config_path = config_path
    try:
        self.read(self.config_path)
    except Exception:  # narrowed from a bare 'except'
        # Missing or unparsable config — fall back to a fresh default one.
        self.createDefaultConfig()
def test_yaml_snippet(self):
    """Round-trip the YAML fixture through YamlParser and compare the
    generated XML against the expected fixture file."""
    if not self.xml_filename or not self.yaml_filename:
        return
    xml_filepath = os.path.join(self.fixtures_path, self.xml_filename)
    expected_xml = u"%s" % open(xml_filepath, 'r').read()
    yaml_filepath = os.path.join(self.fixtures_path, self.yaml_filename)
    config = None
    if self.conf_filename:
        config = ConfigParser()
        conf_filepath = os.path.join(self.fixtures_path, self.conf_filename)
        config.readfp(open(conf_filepath))
    parser = YamlParser(config)
    parser.parse(yaml_filepath)
    # Generate the XML tree.
    parser.generateXML()
    parser.jobs.sort(key=operator.attrgetter('name'))
    # Prettify generated XML.
    pretty_xml = "\n".join(job.output() for job in parser.jobs)
    self.assertThat(
        pretty_xml,
        testtools.matchers.DocTestMatches(expected_xml,
                                          doctest.ELLIPSIS |
                                          doctest.NORMALIZE_WHITESPACE |
                                          doctest.REPORT_NDIFF))
def reload_constraints():
    """
    Parse SWIFT_CONF_FILE and reset the module-level constraint globals,
    populating OVERRIDE_CONSTRAINTS and EFFECTIVE_CONSTRAINTS along the way.
    """
    global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS
    SWIFT_CONSTRAINTS_LOADED = False
    OVERRIDE_CONSTRAINTS = {}
    constraints_conf = ConfigParser()
    if constraints_conf.read(utils.SWIFT_CONF_FILE):
        SWIFT_CONSTRAINTS_LOADED = True
        for name in DEFAULT_CONSTRAINTS:
            try:
                value = constraints_conf.get('swift-constraints', name)
            except NoOptionError:
                pass
            except NoSectionError:
                # The section is missing entirely — no point trying the
                # remaining options.
                break
            else:
                # Ints stay ints; everything else is treated as a CSV list.
                try:
                    value = int(value)
                except ValueError:
                    value = utils.list_from_csv(value)
                OVERRIDE_CONSTRAINTS[name] = value
    for name, default in DEFAULT_CONSTRAINTS.items():
        value = OVERRIDE_CONSTRAINTS.get(name, default)
        EFFECTIVE_CONSTRAINTS[name] = value
        # "globals" in this context is module level globals, always.
        globals()[name.upper()] = value
def init():
    """Prepare the Galaxy model and object store from the ini file named by
    options.config; return (model, object_store, database scheme)."""
    options.config = os.path.abspath(options.config)
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    os.chdir(os.path.dirname(options.config))
    sys.path.append('lib')
    from galaxy import eggs
    import pkg_resources
    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config
    # lazy: inject nice_size into module globals on demand
    globals()['nice_size'] = __import__(
        'galaxy.util', globals(), locals(), ('nice_size',)).nice_size
    parser_defaults = dict(
        here=os.getcwd(),
        database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE')
    config_parser = ConfigParser(parser_defaults)
    config_parser.read(os.path.basename(options.config))
    config_dict = dict(config_parser.items("app:main"))
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)
    from galaxy.model import mapping
    model = mapping.init(config.file_path, config.database_connection,
                         create_tables=False, object_store=object_store)
    return (model, object_store,
            config.database_connection.split(':')[0])
def __init__(self):
    """Wire up URL routes, the database connection, templates and the
    weighted download-mirror pool."""
    handlers = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/rss", RssHandler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler,
         {"path": os.path.join(os.path.dirname(__file__), "static")}),
    ]
    settings = dict(debug=options.debug)
    super(Application, self).__init__(handlers, **settings)
    config = ConfigParser()
    # 'with' fixes the leaked file handle of the original open() call.
    with open(options.config) as config_file:
        config.readfp(config_file)
    # One global connection
    init_database(create_engine(config.get('database', 'uri')))
    self.db = DBSession
    template_path = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[template_path],
                                 filesystem_checks=False)
    # Mirrors are picked proportionally to their weight.
    self.mirrorpool = WeightedChoice((
        ('http://mirror.slc.cyanogenmod.org/%s', 2000),
        ('http://oss.reflected.net/%s', 1000),
        ('http://mirror.symnds.com/software/cm/%s', 1000),
        ('http://mirror.netcologne.de/cyanogenmod/%s', 200),
        ('http://cm.sponsored.cb-webhosting.de/%s', 25),
        ('http://mirror.i3d.net/pub/cyanogenmod/%s', 50),
    ))
class SecretSanta(object):
    """Match every configured person with a gift receiver, honouring each
    person's matching constraints from the people config file."""

    def __init__(self, config):
        log.debug("Constructing SecretSanta main object.")
        self.people = []
        self.givers = []
        self.receivers = []
        self.pcfile = config.get('global', 'peopleconf')
        log.debug("Peopleconf is %s" % self.pcfile)
        self.pc = ConfigParser()
        self.pc.read(self.pcfile)
        for sect in self.pc.sections():
            self.people.append(SecretSantaPerson(sect, self.pc))
        # NOTE(review): self.givers is always empty at this point, so this
        # loop never runs — kept as-is to preserve behavior; confirm intent.
        for p in self.givers:
            self.receivers.append(p)

    def matchall(self):
        '''
        Perform matching for all people with constraints.
        '''
        pool = self.people[:]
        shuffle(pool)
        log.debug("Performing matching...")
        for person in self.people:
            candidate = pool.pop()
            # Re-shuffle and retry until the constraints allow this pair.
            while not person.matchok(candidate):
                pool.append(candidate)
                shuffle(pool)
                candidate = pool.pop()
            person.receiver = candidate
            log.debug("%s -> %s\n" % (person.name, person.receiver.name))

    def list(self):
        '''
        Return string representation of all people in config.
        '''
        log.debug("List all users...")
        return "".join(str(p) for p in self.people)

    def giverslist(self):
        '''
        Return string in form of:

        Joe Bloe -> Mary Margaret
        Mary Margaret -> Susan Strong
        Susan Strong -> Joe Bloe
        '''
        return "".join("%s -> %s\n" % (p.name, p.receiver.name)
                       for p in self.people)
def set_auto_rsvp_groups(): '''Generates a group config file from user input. The config file is saved to CONFIG_FILENAME specified above. All groups of the current member are printed to the config file. However, any groups the user doesn't want to auto RSVP will be commented out with a '#'. ''' groups = get_groups() config_groups = [] for group in groups: ans = raw_input( 'Automatically RSVP yes for %s? [y/n]: ' % group['name'] ).lower() while ans not in ['y', 'n']: print 'Please enter a \'y\' or \'n\'.' ans = raw_input( 'Automatically RSVP yes for %s? [y/n]: ' % group['name'] ).lower() if ans == 'y': # We want to auto-rsvp for this group config_groups.append((str(group['id']), group['name'])) else: # Don't auto RSVP. We'll write add this group with a comment # preceding the line. config_groups.append(('#%s' % str(group['id']), group['name'])) config = ConfigParser() config.add_section('rsvp_groups') [config.set('rsvp_groups', group_id, group_name) for group_id, group_name in config_groups] write_config(config)