def set_auto_rsvp_groups():
    '''Generates a group config file from user input.

    The config file is saved to CONFIG_FILENAME specified above. All
    groups of the current member are printed to the config file.
    However, any groups the user doesn't want to auto RSVP will be
    commented out with a '#'.
    '''
    groups = get_groups()
    config_groups = []
    for group in groups:
        ans = raw_input(
            'Automatically RSVP yes for %s? [y/n]: ' % group['name']
        ).lower()
        while ans not in ['y', 'n']:
            print 'Please enter a \'y\' or \'n\'.'
            ans = raw_input(
                'Automatically RSVP yes for %s? [y/n]: ' % group['name']
            ).lower()
        if ans == 'y':
            # We want to auto-RSVP for this group.
            config_groups.append((str(group['id']), group['name']))
        else:
            # Don't auto-RSVP; add this group with a '#' comment marker
            # preceding the line.
            config_groups.append(('#%s' % str(group['id']), group['name']))
    config = ConfigParser()
    config.add_section('rsvp_groups')
    for group_id, group_name in config_groups:
        config.set('rsvp_groups', group_id, group_name)
    write_config(config)
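# Illustrative only: with two groups where the user answers 'y' then 'n',
# the section written by write_config() would look roughly like this
# (group ids and names below are hypothetical):
#
#   [rsvp_groups]
#   12345 = Python Meetup
#   #67890 = Board Games Night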
class SecretSanta(object):
    def __init__(self, config):
        log.debug("Constructing SecretSanta main object.")
        self.people = []
        self.givers = []
        self.receivers = []
        self.pcfile = config.get('global', 'peopleconf')
        log.debug("Peopleconf is %s" % self.pcfile)
        self.pc = ConfigParser()
        self.pc.read(self.pcfile)
        for sect in self.pc.sections():
            p = SecretSantaPerson(sect, self.pc)
            self.people.append(p)
        for p in self.givers:
            self.receivers.append(p)

    def matchall(self):
        '''Perform matching for all people with constraints.'''
        rlist = []
        for p in self.people:
            rlist.append(p)
        shuffle(rlist)
        log.debug("Performing matching...")
        for p in self.people:
            r = rlist.pop()
            while not p.matchok(r):
                rlist.append(r)
                shuffle(rlist)
                r = rlist.pop()
            p.receiver = r
            log.debug("%s -> %s\n" % (p.name, p.receiver.name))

    def list(self):
        '''Return string representation of all people in config.'''
        log.debug("List all users...")
        s = ""
        for p in self.people:
            s += str(p)
        return s

    def giverslist(self):
        '''Return string in form of:

        Joe Bloe -> Mary Margaret
        Mary Margaret -> Susan Strong
        Susan Strong -> Joe Bloe
        '''
        s = ""
        for p in self.people:
            s += "%s -> %s\n" % (p.name, p.receiver.name)
        return s
def init():
    options.config = os.path.abspath( options.config )
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None

    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )

    from galaxy import eggs
    import pkg_resources

    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config

    # lazy
    globals()['nice_size'] = __import__( 'galaxy.util', globals(), locals(), ( 'nice_size', ) ).nice_size

    config_parser = ConfigParser( dict( here = os.getcwd(),
                                        database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( os.path.basename( options.config ) )

    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value

    config = galaxy.config.Configuration( **config_dict )
    object_store = build_object_store_from_config( config )

    from galaxy.model import mapping

    return mapping.init( config.file_path, config.database_connection, create_tables = False, object_store = object_store ), object_store, config.database_connection.split(':')[0]
def main():
    if len(sys.argv) < 2:
        sys.stderr.write('Usage: %s CONFIG_URI\n' % sys.argv[0])
        sys.exit(1)
    config_uri = sys.argv.pop(1)
    config = ConfigParser()
    config.read(config_uri)
    vars = {
        'CELERY_BROKER': config.get('app:main', 'celery.broker'),
        'here': dirname(abspath('supervisord.conf')),
        'CONFIG_FILE': config_uri,
    }
    for fname in ('supervisord.conf',):
        tmpl = open(fname + '.tmpl').read()
        inifile = open(fname, 'w')
        inifile.write(tmpl % vars)
        inifile.close()
    if not exists('var'):
        mkdir('var')
    if not exists('var/log'):
        mkdir('var/log')
    if not exists('var/run'):
        mkdir('var/run')
class ConfigLoader():
    def __init__(self):
        try:
            open(cfg_name, 'r').close()
        except IOError:
            sys.stderr.write(u'Fatal error: cannot read the config file! Exiting.\n')
            exit(-1)
        self.config = ConfigParser()
        self.config.read(cfg_name)
        self.system = dict(self.config.items('system'))
        self.config_check()

    def config_check(self):
        try:
            # First, check that the configured global storage directory is valid.
            check_path(self.read('global_pos'))
            # Then check that the administrator's download directory exists.
            root_path = os.path.join(self.read('global_pos'), 'root')
            if not os.path.exists(root_path):
                os.mkdir(root_path)
        except Exception, err:
            sys.stderr.write(u'System error: %s\n' % err)
            exit(-1)
        # Finally, check whether the port is available.
        if check_port(self.read('port_name')):
            sys.stderr.write(u'System error: the port is already in use!\n')
            exit(-1)
def test_yaml_snippet(self):
    if not self.xml_filename or not self.yaml_filename:
        return

    xml_filepath = os.path.join(self.fixtures_path, self.xml_filename)
    expected_xml = u"%s" % open(xml_filepath, 'r').read()

    yaml_filepath = os.path.join(self.fixtures_path, self.yaml_filename)

    if self.conf_filename:
        config = ConfigParser()
        conf_filepath = os.path.join(self.fixtures_path, self.conf_filename)
        config.readfp(open(conf_filepath))
    else:
        config = None

    parser = YamlParser(config)
    parser.parse(yaml_filepath)

    # Generate the XML tree
    parser.generateXML()
    parser.jobs.sort(key=operator.attrgetter('name'))

    # Prettify generated XML
    pretty_xml = "\n".join(job.output() for job in parser.jobs)

    self.assertThat(
        pretty_xml,
        testtools.matchers.DocTestMatches(
            expected_xml,
            doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE |
            doctest.REPORT_NDIFF))
def readFrom(self, path, section):
    parser = ConfigParser()
    if not parser.read(path):
        raise CarbonConfigException("Failed to read config file %s" % path)

    if not parser.has_section(section):
        return

    for key, value in parser.items(section):
        key = key.upper()

        # Detect type from defaults dict
        if key in defaults:
            valueType = type(defaults[key])
        else:
            valueType = str

        if valueType is list:
            value = [v.strip() for v in value.split(',')]
        elif valueType is bool:
            value = parser.getboolean(section, key)
        else:
            # Attempt to figure out numeric types automatically
            try:
                value = int(value)
            except ValueError:
                try:
                    value = float(value)
                except ValueError:
                    pass

        self[key] = value
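# Illustrative only: a hypothetical `defaults` dict of the kind readFrom()
# consults above -- the *type* of each default value drives the coercion.
defaults = {
    'MAX_CACHE_SIZE': 1000000,            # int default -> int(value)
    'ENABLE_LOGROTATION': True,           # bool default -> parser.getboolean()
    'DESTINATIONS': ['127.0.0.1:2004'],   # list default -> comma-split list
    'LOCAL_DATA_DIR': '/var/lib/carbon',  # str default -> left as a string
}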
class Config(object):
    def __new__(type, *args, **kwargs):
        if not '_the_instance' in type.__dict__:
            type._the_instance = object.__new__(type)
        return type._the_instance

    def __init__(self, filename=None):
        if filename != None:
            self.filename = filename
            self.config = ConfigParser()
            self.config.read(self.filename)

    def get_section(self, name):
        if self.config.has_section(name):
            return _Section(name, self.config.items(name), self)
        else:
            return _Section(name, [], self)

    def __getattr__(self, attr):
        if attr == 'irc':
            return self.get_section('IRC')
        elif attr == 'ldap':
            return self.get_section('LDAP')
        elif attr == 'rpc':
            return self.get_section('RPC')
        elif attr == 'bot':
            return self.get_section('Bot')
        elif attr == 'smtp':
            return self.get_section('SMTP')
        elif attr == 'db':
            return self.get_section('Database')
        elif attr == 'identica':
            return self.get_section('Identi.ca')
        else:
            raise AttributeError('No section \'%s\' in Config.' % attr)
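# Hypothetical usage sketch (path and names are illustrative): __new__
# makes Config a singleton, so later constructions return the same
# already-configured instance.
cfg = Config('/etc/bot/bot.ini')
same = Config()      # same object as cfg; nothing is re-read
irc = cfg.irc        # _Section built from the [IRC] section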
def read_last_synced(self, _job, _smb_connection=None):
    """
    Read the date and time from the last_synced property of the destination
    status file

    :param _job: A SyncJob instance
    :param _smb_connection: An SMB connection.
    :return:
    """
    _cfg = ConfigParser()
    _file_obj = None

    # Is it a remote host?
    if _smb_connection is not None:
        _file_obj = read_string_file_smb(
            _smb_connection,
            os.path.join(_job.destination_folder, 'ofs_status.txt'))
    else:
        try:
            _file_obj = open(
                os.path.join(_job.destination_folder, 'ofs_status.txt'), "r")
        except IOError:
            _file_obj = None

    if _file_obj:
        _cfg.readfp(_file_obj)
        _file_obj.close()
        _result = _cfg.get("history", "last_synced")
        if _result is not None:
            return datetime.datetime.strptime(_result, "%Y-%m-%d %H:%M:%S.%f")
        else:
            return None
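# Illustrative only: the ofs_status.txt layout read_last_synced() expects;
# the timestamp must match the "%Y-%m-%d %H:%M:%S.%f" format parsed above.
#
#   [history]
#   last_synced = 2016-05-01 12:30:00.000000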
class Config(object):
    def __init__(self):
        self.parser = ConfigParser()
        self.read_configuration()

    def read_configuration(self):
        # check if we are in svn working dir
        if not os.path.isdir('.svn'):
            raise ExsvnError("Current directory is not a svn working directory")

        fullcfgname = os.path.join('.svn', CONFIG_FILENAME)
        if not os.path.exists(fullcfgname):
            self.create_configuration(fullcfgname)

        self.parser.read(fullcfgname)

    def create_configuration(self, fname):
        """Create new configuration file"""
        print "Creating default configuration in %s" % fname
        cfg = self.get_default_configuration()
        f = file(fname, "w")
        f.write(cfg)
        f.close()
        # protect from others
        os.chmod(fname, 0600)

    def get_default_configuration(self):
        return """
def sync():
    # Add or replace the relevant properties from galaxy.ini
    # into reports.ini
    reports_config_file = "config/reports.ini"
    if len(argv) > 1:
        reports_config_file = argv[1]

    universe_config_file = "config/galaxy.ini"
    if len(argv) > 2:
        universe_config_file = argv[2]

    parser = ConfigParser()
    parser.read(universe_config_file)

    with open(reports_config_file, "r") as f:
        reports_config_lines = f.readlines()

    replaced_properties = set([])
    with open(reports_config_file, "w") as f:
        # Write all properties from reports config, replacing as needed.
        for reports_config_line in reports_config_lines:
            (line, replaced_property) = get_synced_line(reports_config_line,
                                                        parser)
            if replaced_property:
                replaced_properties.add(replaced_property)
            f.write(line)

        # If any properties appear in universe config and not in
        # reports, write these as well.
        for replacement_property in REPLACE_PROPERTIES:
            if parser.has_option(MAIN_SECTION, replacement_property) and \
                    not (replacement_property in replaced_properties):
                f.write(get_universe_line(replacement_property, parser))
def get_mapping_fields(self, cr, uid, shop_id, context=None, reverse=False):
    result = {}
    if not context:
        context = {}
    shop_pool = self.pool.get('sale.shop')
    shop_data = shop_pool.read(cr, uid, shop_id, ['prestashop_config_path'])
    if not shop_data['prestashop_config_path'] or \
            not shop_data['prestashop_config_path'].endswith(".conf") or \
            not self._prestashop:
        return result, False
    config = ConfigParser()
    config.read(shop_data['prestashop_config_path'])
    if not self._name in config.sections():
        return result, False
    mapping = dict(config.items(self._name))
    return eval(mapping.get('type_mapping', "[]"))
def __init__(self):
    handlers = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/rss", RssHandler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
    ]
    settings = dict(
        debug=options.debug,
    )
    super(Application, self).__init__(handlers, **settings)
    config = ConfigParser()
    config.readfp(open(options.config))
    # One global connection
    init_database(create_engine(config.get('database', 'uri')))
    self.db = DBSession
    template_path = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[template_path])
    self.activebuilds = ActiveBuilds()
    #self.stats = Stats()
    self.mirrorpool = WeightedChoice((
        ('http://oss.reflected.net/%s', 1000),
        ('http://mirror.sea.tdrevolution.net/%s', 500),
        ('http://cm.sponsored.cb-webhosting.de/%s', 50),
        ('http://mirror.netcologne.de/cyanogenmod/%s', 75),
    ))
def readConfigFile(self, config_file_name=None):
    """Reads the given config file, or if none is given, the
    default config file.

    :param config_file_name: a String specifying the name of the
       config file to read.
    """
    # Create ConfigParser and UDConfig Objects
    confparser = ConfigParser()
    self.opts = YumCronConfig()

    # If no config file name is given, fall back to the default
    if config_file_name == None:
        config_file_name = default_config_file

    # Attempt to read the config file. confparser.read will return a
    # list of the files that were read successfully, so check that it
    # contains config_file
    if config_file_name not in confparser.read(config_file_name):
        print >> sys.stderr, "Error reading config file:", config_file_name
        sys.exit(1)

    # Populate the values into the opts object
    self.opts.populate(confparser, 'commands')
    self.opts.populate(confparser, 'emitters')
    self.opts.populate(confparser, 'email')
    self.opts.populate(confparser, 'groups')
    self._confparser = confparser

    # If the system name is not given, set it by getting the hostname
    if self.opts.system_name == 'None':
        self.opts.system_name = gethostname()

    if 'None' in self.opts.group_list:
        self.opts.group_list = []
def _load_object_post_as_copy_conf(self, conf):
    if ('object_post_as_copy' in conf or '__file__' not in conf):
        # Option is explicitly set in middleware conf. In that case,
        # we assume the operator knows what they're doing.
        # This takes precedence over the one set in the proxy app.
        return

    cp = ConfigParser()
    if os.path.isdir(conf['__file__']):
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])

    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return

    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    try:
        conf['object_post_as_copy'] = cp.get(proxy_section,
                                             'object_post_as_copy')
    except (NoSectionError, NoOptionError):
        pass
def __init__(self):
    handlers = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/rss", RssHandler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler,
         {"path": os.path.join(os.path.dirname(__file__), "static")}),
    ]
    settings = dict(
        debug=options.debug,
    )
    super(Application, self).__init__(handlers, **settings)
    config = ConfigParser()
    config.readfp(open(options.config))
    # One global connection
    init_database(create_engine(config.get('database', 'uri')))
    self.db = DBSession
    template_path = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[template_path],
                                 filesystem_checks=False)
    self.mirrorpool = WeightedChoice((
        ('http://mirror.slc.cyanogenmod.org/%s', 2000),
        ('http://oss.reflected.net/%s', 1000),
        ('http://mirror.symnds.com/software/cm/%s', 1000),
        ('http://mirror.netcologne.de/cyanogenmod/%s', 200),
        ('http://cm.sponsored.cb-webhosting.de/%s', 25),
        ('http://mirror.i3d.net/pub/cyanogenmod/%s', 50),
    ))
def startService(self):
    BaseUpstreamScheduler.startService(self)
    log.msg("starting l10n scheduler")
    if self.inipath is None:
        # testing, don't trigger tree builds
        return
    # trigger tree builds for our trees, clear() first
    cp = ConfigParser()
    cp.read(self.inipath)
    self.trees.clear()
    _ds = []
    for tree in cp.sections():
        # create a BuildSet, submit it to the BuildMaster
        props = properties.Properties()
        props.update({
            'tree': tree,
            'l10nbuilds': self.inipath,
        }, "Scheduler")
        bs = buildset.BuildSet([self.treebuilder],
                               SourceStamp(),
                               properties=props)
        self.submitBuildSet(bs)
        _ds.append(bs.waitUntilFinished())
    d = defer.DeferredList(_ds)
    d.addCallback(self.onTreesBuilt)
    self.waitOnTree = d
def __init__(self, config_path=CONFIG_PATH):
    ConfigParser.__init__(self, allow_no_value=True)
    self.config_path = config_path
    # read() does not raise on a missing file; it returns the list of
    # files it actually parsed, so an empty result means the config
    # file doesn't exist yet and the default one should be created.
    if not self.read(self.config_path):
        self.createDefaultConfig()
def reload_constraints():
    """
    Parse SWIFT_CONF_FILE and reset module level global constraint attrs,
    populating OVERRIDE_CONSTRAINTS and EFFECTIVE_CONSTRAINTS along the way.
    """
    global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS
    SWIFT_CONSTRAINTS_LOADED = False
    OVERRIDE_CONSTRAINTS = {}
    constraints_conf = ConfigParser()
    if constraints_conf.read(utils.SWIFT_CONF_FILE):
        SWIFT_CONSTRAINTS_LOADED = True
        for name in DEFAULT_CONSTRAINTS:
            try:
                value = constraints_conf.get('swift-constraints', name)
            except NoOptionError:
                pass
            except NoSectionError:
                # We are never going to find the section for another option
                break
            else:
                try:
                    value = int(value)
                except ValueError:
                    value = utils.list_from_csv(value)
                OVERRIDE_CONSTRAINTS[name] = value
    for name, default in DEFAULT_CONSTRAINTS.items():
        value = OVERRIDE_CONSTRAINTS.get(name, default)
        EFFECTIVE_CONSTRAINTS[name] = value
        # "globals" in this context is module level globals, always.
        globals()[name.upper()] = value
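# Illustrative only: a swift.conf fragment this loader would pick up
# (option names and values are examples, not taken from the source).
# Integer-looking values become ints; anything else is split as CSV.
#
#   [swift-constraints]
#   max_file_size = 5368709122
#   valid_api_versions = v0,v1,v2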
def get_config_parameter(jobname, parameter_name, is_parameter_collection=False):
    """Return the value of PARAMETER_NAME from JOBNAME's config.

    Parse JOBNAME.cfg and return the requested parameter (or, when
    IS_PARAMETER_COLLECTION is set, the list of values whose option names
    start with PARAMETER_NAME). Return None if a problem is found."""
    jobconfig = ConfigParser()
    jobconffile = CFG_ETCDIR + os.sep + 'bibexport' + os.sep + jobname + '.cfg'

    if not os.path.exists(jobconffile):
        write_message("ERROR: cannot find config file %s." % jobconffile)
        return None

    jobconfig.read(jobconffile)

    if is_parameter_collection:
        all_items = jobconfig.items(section='export_job')
        parameters = []
        for item_name, item_value in all_items:
            if item_name.startswith(parameter_name):
                parameters.append(item_value)
        return parameters
    else:
        parameter = jobconfig.get('export_job', parameter_name)
        return parameter
def config():
    global video_format
    global resolution
    configr = ConfigParser()
    configr.read('settings.ini')
    quality = configr.get('SETTINGS', 'video_quality')
    qualities = {'android': ['107', '71'],
                 '360p': ['106', '60'],
                 '480p': ['106', '61'],
                 '720p': ['106', '62'],
                 '1080p': ['108', '80'],
                 'highest': ['0', '0']}
    video_format = qualities[quality][0]
    resolution = qualities[quality][1]
    global lang
    global lang2
    lang = configr.get('SETTINGS', 'language')
    lang2 = configr.get('SETTINGS', 'language2')
    langd = {'Espanol_Espana': u'Español (Espana)',
             'Francais': u'Français (France)',
             'Portugues': u'Português (Brasil)',
             'English': u'English',
             'Espanol': u'Español',
             'Turkce': u'Türkçe',
             'Italiano': u'Italiano',
             'Arabic': u'العربية',
             'Deutsch': u'Deutsch'}
    lang = langd[lang]
    lang2 = langd[lang2]
    forcesub = configr.getboolean('SETTINGS', 'forcesubtitle')
    global forceusa
    forceusa = configr.getboolean('SETTINGS', 'forceusa')
    global localizecookies
    localizecookies = configr.getboolean('SETTINGS', 'localizecookies')
    onlymainsub = configr.getboolean('SETTINGS', 'onlymainsub')
    return [lang, lang2, forcesub, forceusa,
            localizecookies, quality, onlymainsub]
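# Illustrative only: the settings.ini shape config() expects -- option
# names match the reads above, the values shown are hypothetical.
#
#   [SETTINGS]
#   video_quality = 720p
#   language = English
#   language2 = Deutsch
#   forcesubtitle = False
#   forceusa = False
#   localizecookies = False
#   onlymainsub = False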
def restore_rois(self, roifile):
    """restore ROI setting from ROI.dat file"""
    cp = ConfigParser()
    cp.read(roifile)
    rois = []
    self.mcas[0].clear_rois()
    prefix = self.mcas[0]._prefix
    if prefix.endswith('.'):
        prefix = prefix[:-1]
    iroi = 0
    for a in cp.options('rois'):
        if a.lower().startswith('roi'):
            name, dat = cp.get('rois', a).split('|')
            lims = [int(i) for i in dat.split()]
            lo, hi = lims[0], lims[1]
            roi = ROI(prefix=prefix, roi=iroi)
            roi.left = lo
            roi.right = hi
            roi.name = name.strip()
            rois.append(roi)
            iroi += 1

    epics.poll(0.050, 1.0)
    self.mcas[0].set_rois(rois)
    cal0 = self.mcas[0].get_calib()
    for mca in self.mcas[1:]:
        mca.set_rois(rois, calib=cal0)
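# Illustrative only: the [rois] section layout restore_rois() parses --
# each value is "name | lo hi", split on '|' and then on whitespace.
# ROI names and limits below are hypothetical.
#
#   [rois]
#   roi0 = Fe Ka | 610 680
#   roi1 = Cu Ka | 790 840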
def test_import_local(self):
    self.tool.run(
        config=(self.yaml_config, self.config),
        verbose=True,
        clobber=False,
        repo_dir=self.working_dir,
        repo_url=self.workspace.working_dir,
        ini_config=self.ini_config,
        ini_section='app:main',
        update_config=True,
        repo_name=None,
        repo_host=None)

    cp = ConfigParser()
    cp.read(self.ini_config)
    self.assertEqual(
        cp.get('app:main', 'unicore.content_repo_urls').strip(),
        os.path.basename(self.workspace.working_dir))

    with open(self.yaml_config, 'r') as fp:
        data = yaml.safe_load(fp)
        repo_name = parse_repo_name(self.workspace.working_dir)
        self.assertEqual(data['repositories'], {
            repo_name: self.workspace.working_dir
        })
def main(numthreads=10):
    t1 = time.time()
    queue = Queue()
    factory = TokenFactory()
    config = ConfigParser()
    config.read('vk_api.conf')
    url = API.get_url(
        app_id=config.get('api', 'id'),
        app_key=config.get('api', 'key'),
        permissions=PERMISSIONS,
        redirect_uri=URI,
        display=DISPLAY,
        api_version=VERSION)

    # TODO: check token expiration
    token_pair = factory.get_token_pair()
    if not token_pair:
        token_pair = factory.store_token_pair(url)

    api = API(token=token_pair[0], user_id=token_pair[1])
    audio = api.audio
    data = audio.get
    if data:
        for item in data['response']['items']:
            queue.put(item)

    for i in range(numthreads):
        t = DownloadThread(queue, FILE_DIR)
        t.start()

    queue.join()

    t2 = time.time()
    print('Time: {0}'.format(t2 - t1))
def __init__(self):
    handlers = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler,
         {"path": os.path.join(os.path.dirname(__file__), "static")}),
    ]
    settings = dict(
        debug=options.debug,
    )
    super(Application, self).__init__(handlers, **settings)
    config = ConfigParser()
    config.readfp(open(options.config))
    # One global connection
    init_database(create_engine(config.get('database', 'uri')))
    self.db = DBSession
    template_path = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[template_path],
                                 filesystem_checks=False)
    self.mirrorpool = WeightedChoice((
        ('http://mirror.galliumos.org/%s', 1000),
    ))
def test_umit_conf_content(filename):
    parser = ConfigParser()
    parser.read(filename)

    # Paths section
    section = "paths"
    assert get_or_false(parser, section, "nmap_command_path")
def __init__(self, config_path):
    self.CONFIG['globals'] = dict(SERVICE_GLOBAL_DEFAULT)
    self.CONFIG['tcprpc'] = dict(SERVICE_TCPRPC_DEFAULT)
    self.CONFIG['ssltcprpc'] = dict(SERVICE_SSLTCPRPC_DEFAULT)
    self.CONFIG['soaprpc'] = dict(SERVICE_SOAPRPC_DEFAULT)
    self.CONFIG['sslsoaprpc'] = dict(SERVICE_SSLSOAPRPC_DEFAULT)
    self.CONFIG['udprpc'] = dict(SERVICE_UDPRPC_DEFAULT)
    self.CONFIG['unixrpc'] = dict(SERVICE_UNIXRPC_DEFAULT)
    self.CONFIG['unixudprpc'] = dict(SERVICE_UNIXUDPRPC_DEFAULT)
    self.CONFIG['tcpv6rpc'] = dict(SERVICE_TCPV6RPC_DEFAULT)
    self.CONFIG['ssltcpv6rpc'] = dict(SERVICE_SSLTCPV6RPC_DEFAULT)
    self.CONFIG['udpv6rpc'] = dict(SERVICE_UDPV6RPC_DEFAULT)

    cf = ConfigParser()
    cf.read(config_path)
    # Map common truthy/falsy strings onto Python values.
    l = {'true': True, 'false': False, 'none': None,
         'y': True, 'yes': True,
         'n': False, 'no': False,
         'enable': True, 'disable': False}
    for s in cf.sections():
        if s.lower() in self.CONFIG:
            for (n, v) in cf.items(s):
                if v.lower() in l:
                    v = l[v.lower()]
                elif n.lower() in ('port', 'timeout', 'poll_interval',
                                   'max_children'):
                    v = int(v)
                if n.lower() in self.CONFIG[s]:
                    if type(v) == str and len(v) > 0:
                        if self.CONFIG[s][n] != v.lower():
                            self.CONFIG[s][n] = v.lower()
                    elif type(v) in (bool, int, type(None)):
                        if self.CONFIG[s][n] != v and v:
                            self.CONFIG[s][n] = v
                else:
                    _logger.info(
                        'Invalid parameter: - %s section: - %s - Ignored'
                        % (n, s))
        else:
            _logger.info('Invalid section: - %s - Ignored' % (s,))
def test_import_remote(self):
    repo_name = parse_repo_name(self.workspace.working_dir)
    repo_location = 'http://localhost:8080/repos/%s.json' % repo_name
    responses.add_callback(
        responses.POST, 'http://localhost:8080/repos.json',
        callback=lambda _: (301, {'Location': repo_location}, ''))

    self.tool.run(
        config=(self.yaml_config, self.config),
        verbose=True,
        clobber=False,
        repo_dir=self.working_dir,
        repo_url=self.workspace.working_dir,
        ini_config=self.ini_config,
        ini_section='app:main',
        update_config=True,
        repo_name=None,
        repo_host='http://localhost:8080')

    cp = ConfigParser()
    cp.read(self.ini_config)
    self.assertEqual(
        cp.get('app:main', 'unicore.content_repo_urls').strip(),
        repo_location)

    with open(self.yaml_config, 'r') as fp:
        data = yaml.safe_load(fp)
        self.assertEqual(data['repositories'], {
            repo_name: self.workspace.working_dir
        })
def install_mercurial_hook():
    """
    Installs the mercurial precommit hook by adding a hook to the hgrc
    file in the .hg directory of the repository.
    """
    repo_dir = get_repo_dir()
    config_file = os.path.join(repo_dir, '.hg', 'hgrc')
    config_parser = ConfigParser()
    config_parser.read(config_file)
    precommit_abs_file = os.path.join(repo_dir, 'scripts',
                                      'codestyleprecommit.py')

    section = 'hooks'
    key = 'pretxncommit.precommit'
    value = 'python:%s:mercurial_hook' % precommit_abs_file

    if not config_parser.has_section(section):
        config_parser.add_section(section)
    config_parser.set(section, key, value)

    with open(config_file, 'w') as config:
        config_parser.write(config)
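# Illustrative only: after install_mercurial_hook() runs, .hg/hgrc gains
# a section like this (the repository path below is hypothetical):
#
#   [hooks]
#   pretxncommit.precommit = python:/path/to/repo/scripts/codestyleprecommit.py:mercurial_hook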
def _load_configuration(environment, path):
    """Loads a given configuration file specified by path and environment
    header (ini file). Returns a dict of key/value pairs representing the
    configuration. Values enclosed in {} are automatically decrypted using
    the $FURTHER_PASSWORD variable. Values that equal [RND] will be
    replaced with a random string."""

    # Read configuration file
    parser = ConfigParser()
    parser.read(path)

    config = {}
    for option in parser.options(environment):
        value = parser.get(environment, option)

        # Handle encrypted configuration
        if (re.match(r'^\{.*\}$', value)):
            encrypted_value = re.match(r'^\{(.*)\}$', value).group(1)
            value = (local('decrypt.sh input="' + encrypted_value +
                           '" password=$FURTHER_PASSWORD '
                           'algorithm="PBEWithSHA1AndDESede" '
                           'verbose="false"', capture=True))

        # Handle random values
        if (re.match(r'\[RND\]', value)):
            value = _random_string()

        config[option] = value

    return config
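# Illustrative only: an environment-keyed ini fragment for this loader
# (section and option names are hypothetical). The {...} value would be
# passed through decrypt.sh; [RND] is replaced with a random string.
#
#   [production]
#   db.password = {a1b2c3d4e5}
#   session.salt = [RND]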
def parse_image_build_config(self, config_file_name):

    # Logic taken from koji.cli.koji.handle_image_build.
    # Unable to re-use koji's code because "cli" is not
    # a package of koji and this logic is intermingled
    # with CLI specific instructions.

    args = []
    opts = {}

    config = ConfigParser()
    config.readfp(self.get_default_image_build_conf())
    config.read(config_file_name)

    if self.architectures:
        config.set('image-build', 'arches', ','.join(self.architectures))
    elif self.architecture:
        config.set('image-build', 'arches', self.architecture)
    # else just use what was provided by the user in image-build.conf

    config_str = StringIO()
    config.write(config_str)
    self.log.debug('Image Build Config: \n%s', config_str.getvalue())

    image_name = None

    section = 'image-build'
    for option in ('name', 'version', 'arches', 'target', 'install_tree'):
        value = config.get(section, option)
        if not value:
            raise ValueError('{} cannot be empty'.format(option))
        if option == 'arches':
            value = [arch for arch in value.split(',') if arch]
        elif option == 'name':
            image_name = value
        args.append(value)
        config.remove_option(section, option)

    for option, value in config.items(section):
        if option in ('repo', 'format'):
            value = [v for v in value.split(',') if v]
        elif option in ('disk_size',):
            value = int(value)
        opts[option] = value

    section = 'ova-options'
    if config.has_section(section):
        ova = []
        for k, v in config.items(section):
            ova.append('{}={}'.format(k, v))
        opts['ova_option'] = ova

    section = 'factory-parameters'
    if config.has_section(section):
        factory = []
        for option, value in config.items(section):
            factory.append((option, value))
        opts['factory_parameter'] = factory

    return image_name, args, {'opts': opts}
import os

from optparse import OptionParser
from jinja2 import Environment, FileSystemLoader
from ConfigParser import ConfigParser

parser = OptionParser()
parser.add_option("-t", "--template", dest="tplfile",
                  default='windows.html',
                  help="the template of config file")
parser.add_option("-w", "--output", dest="outputf",
                  default='window.cfg',
                  help="the output config file")
(options, args) = parser.parse_args()

cf = ConfigParser()
if os.path.exists('config.ini'):
    cf.read('config.ini')
else:
    print 'config.ini not found'

jinja_templates = os.path.join(os.path.dirname(__file__), './templates')
#print jinja_templates

hosts = []
default = {}
#item = type('item',(object,),dicts)
for item in cf.options('main'):
    default[item] = cf.get('main', item)

for section in cf.sections():
    vars = {}
class HostsFileSpider(scrapy.Spider):
    name = 'hosts-file'
    allowed_domains = ['hosts-file.net']
    start_urls = ['https://hosts-file.net/rss.asp']
    config = ConfigParser()
    config.read("scrapy.cfg")

    def start_requests(self):
        self.start = datetime.now()
        email_message = "The {} start at {}".format(self.name, self.start)
        threat_email = ThreatEmail()
        threat_email.send_mail(self.config.get("email_service", "user_name"),
                               self.config.get("email_service", "receivers"),
                               "{} spider information".format(self.name),
                               email_message)
        yield scrapy.Request(url='https://hosts-file.net/rss.asp',
                             callback=self.parse)

    def parse(self, response):
        # Parse using the RSS feed
        last_build = response.css("channel lastBuildDate::text").extract()[0]
        conf = ConfigParser()
        conf.read("scrapy.cfg")
        last_last_build = conf.get(self.name, "last_build")
        now = datetime.utcnow()
        if last_last_build != last_build:
            conf.set(self.name, "last_build", last_build)
            conf.write(open("scrapy.cfg", "w+"))
            for message_line in response.css("channel item"):
                host_file_item = HostsFileItem()
                host_file_item['host_name'] = message_line.css("title::text").extract()[0]
                host_file_item['link'] = message_line.css("link::text").extract()[0]
                description = message_line.css("description::text").extract()[0]
                elements = description.split("<br>")
                host_file_item["ip"] = elements[1].split(":")[1].strip(" ")
                host_file_item["host_class"] = elements[2].split(":")[1].strip(" ")
                host_file_item["submit_time"] = message_line.css("pubDate::text").extract()[0]
                host_file_item["last_build"] = last_build
                host_file_item["add_time"] = now
                yield host_file_item
        print "========>Synchronization Complete<========"

    def close(spider, reason):
        end = datetime.now()
        email_message = "The {} start at {}, and end at {}".format(
            spider.name, spider.start, end)
        threat_email = ThreatEmail()
        threat_email.send_mail(spider.config.get("email_service", "user_name"),
                               spider.config.get("email_service", "receivers"),
                               "{} spider information".format(spider.name),
                               email_message)
def __init__(self, defaults, *args, **kwargs):
    self.defaults = defaults
    ConfigParser.__init__(self, *args, **kwargs)
    from urllib2 import urlopen
    from ConfigParser import SafeConfigParser as ConfigParser
else:
    import urllib.request
    import urllib.parse
    import urllib.error
    from urllib.request import urlopen
    from configparser import ConfigParser

GANGLIA = 0
GANGLIA_PROC = 0

config = ConfigParser()
config.read('slurmbrowser.cfg')
GANGLIA = int(config.get('MAIN', 'ganglia'))
GANGLIA_PROC = int(config.get('MAIN', 'ganglia_proc'))
if GANGLIA:
    ganglia_server = config.get('Ganglia', 'ganglia_server')
    ganglia_cluster_name = config.get('Ganglia', 'ganglia_cluster_name')
    ganglia_api_port = config.get('Ganglia', 'ganglia_api_port')
    serverurl = config.get('Ganglia', 'serverurl')
    graphurl = config.get('Ganglia', 'graphurl')
    ganglia_node_domainname = config.get('Ganglia', 'ganglia_node_domainname')
    user = config.get('Ganglia', 'user')
    passwd = config.get('Ganglia', 'passwd')
    if user:
def __init__(self, hosts, topo_list=None, config_file=None):
    """Create a new Hadoop cluster with the given hosts and topology.

    Args:
      hosts (list of Host):
        The hosts to be assigned a topology.
      topo_list (list of str, optional):
        The racks to be assigned to each host. len(hosts) should be
        equal to len(topo_list).
      config_file (str, optional):
        The path of the config file to be used.
    """

    # Load properties
    config = ConfigParser(self.defaults)
    config.add_section("cluster")
    config.add_section("local")

    if config_file:
        config.readfp(open(config_file))

    # Deployment properties
    self.local_base_conf_dir = config.get("local", "local_base_conf_dir")
    self.init_conf_dir = tempfile.mkdtemp("", "hadoop-init-", "/tmp")
    self.conf_mandatory_files = [CORE_CONF_FILE,
                                 HDFS_CONF_FILE,
                                 MR_CONF_FILE]

    # Node properties
    self.base_dir = config.get("cluster", "hadoop_base_dir")
    self.conf_dir = config.get("cluster", "hadoop_conf_dir")
    self.logs_dir = config.get("cluster", "hadoop_logs_dir")
    self.hadoop_temp_dir = config.get("cluster", "hadoop_temp_dir")
    self.hdfs_port = config.getint("cluster", "hdfs_port")
    self.mapred_port = config.getint("cluster", "mapred_port")

    self.bin_dir = self.base_dir + "/bin"
    self.sbin_dir = self.base_dir + "/bin"

    self.java_home = None

    # Configure master and slaves
    self.hosts = list(hosts)
    self.master = self.hosts[0]

    # Create topology
    self.topology = HadoopTopology(hosts, topo_list)

    # Store cluster information
    self.hw = hw_manager.make_deployment_hardware()
    self.hw.add_hosts(self.hosts)
    self.master_cluster = self.hw.get_host_cluster(self.master)

    # Create a string to display the topology
    t = {v: [] for v in self.topology.topology.values()}
    for key, value in self.topology.topology.iteritems():
        t[value].append(key.address)

    log_topo = ', '.join([
        style.user2(k) + ': ' +
        ' '.join(map(lambda x: style.host(x.split('.')[0]), v))
        for k, v in t.iteritems()])

    logger.info(
        "Hadoop cluster created with master %s, hosts %s and topology %s",
        style.host(self.master.address),
        ' '.join([style.host(h.address.split('.')[0]) for h in self.hosts]),
        log_topo)
def configure(self, config_file):
    config = ConfigParser()
    config.read(os.path.expanduser(config_file))
    get = lambda x: config.get('default', x)  # noqa
    self.api_key = get('api_key')
def read_config(self):
    self.config = ConfigParser()
    self.config.optionxform = str
    self.config.read(self.config_file)
Generated by 'django-admin startproject' using Django 1.10.5.

For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

from ConfigParser import ConfigParser

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

config = ConfigParser()
config.read(os.path.join(BASE_DIR, '../django.conf'))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('main', 'SECRET')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition
class Conf:
    config = ""
    vars = ""

    def __init__(self, config_file):
        self.config_file = config_file
        self.read_config()
        self.load_variables()

    def read_config(self):
        self.config = ConfigParser()
        self.config.optionxform = str
        self.config.read(self.config_file)

    def save_config(self):
        with open(self.config_file, 'wb') as configfile:
            self.config.write(configfile)

    def load_variables(self):
        if self.config.has_section("settings"):
            par = dict(self.config.items("settings"))
            for p in par:
                par[p] = par[p].split("#", 1)[0].strip().replace("\"", "")
            self.vars = par
        else:
            print("Failed to load the settings in the config file.")
            exit(1)

    def list_config(self):
        print("Showing lambda-toolkit configurations:")
        if "C_DEFAULT_ROLE" in self.vars:
            print("- Default Role: " + self.vars['C_DEFAULT_ROLE'])
            print("-----------------")
        for s in self.config.sections():
            if s == self.vars['C_CONFIG_SQS']:
                if 'C_CONFIG_SQS_QUEUES' in self.vars:
                    queues = Utils.get_list_config(
                        self,
                        self.vars['C_CONFIG_SQS'],
                        self.vars['C_CONFIG_SQS_QUEUES'])
                    if len(queues) != 0:
                        print("SQS (Queues):")
                        for q in queues:
                            print("- Queue name: " + q)
                        print("-----------------")
            elif s == self.vars['C_CONFIG_LAMBDAPROXY']:
                lbs = self.config.items(self.vars['C_CONFIG_LAMBDAPROXY'])
                if len(lbs) != 0:
                    print("Lambda Proxies:")
                    for lb in lbs:
                        print("- Lambda Proxy: " + lb[0] +
                              "\t\t[SQS: " + lb[1] + "]")
                    print("-----------------")
            elif s == self.vars['C_CONFIG_SETTINGS']:
                pass
            else:
                deployed = self.config.get(s, "deployed")
                print("- User Lambda Project: " + s +
                      "\t[Deployed: " + deployed + "]")

    def delete_all_config(self):
        if 'C_CONFIG_LAMBDAPROXY' in self.vars:
            for lp in self.config.items(self.vars['C_CONFIG_LAMBDAPROXY']):
                self.conf = Ltklambdaproxy(self, lp[0]).undeploy_lambda_proxy()
        if 'C_CONFIG_SQS_QUEUES' in self.vars:
            queues = Utils.get_list_config(self,
                                           self.vars['C_CONFIG_SQS'],
                                           self.vars['C_CONFIG_SQS_QUEUES'])
            for q in queues:
                self.conf = Queue(self, q).delete_queue()
        print("Removed all proxies and queues.")
import importlib
import sys
import os
import sphinx
import stsci_rtd_theme
import sphinx_astropy


def setup(app):
    app.add_stylesheet("stsci.css")


from distutils.version import LooseVersion

try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser
conf = ConfigParser()

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('jwst/'))
sys.path.insert(0, os.path.abspath('exts/'))

# -- General configuration ------------------------------------------------
conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.3'
import os
import sys
import unittest

from ConfigParser import ConfigParser

from spacewalk.server import rhnSQL

# Import all test modules here:
sys.path.insert(
    0,
    os.path.abspath(
        os.path.dirname(
            os.path.abspath(__file__) + "/../non-unit/server/rhnSQL/")))
import dbtests

config = ConfigParser()
config.read(os.path.dirname(os.path.abspath(__file__)) + "/db_settings.ini")
USER = config.get('oracle', 'user')
PASSWORD = config.get('oracle', 'password')
DATABASE = config.get('oracle', 'database')

rhnSQL.initDB(backend="oracle", username=USER, password=PASSWORD,
              database=DATABASE)

# Re-initialize to test re-use of connections:
rhnSQL.initDB(backend="oracle", username=USER, password=PASSWORD,
# file location
def _Loc(loc=True):
    """return dir path or full path."""
    dir_path = os.path.dirname(os.path.realpath(__file__))
    full_path = os.path.realpath(__file__)
    if loc != False:
        # return dir path
        return dir_path
    else:
        # return abs path
        return full_path


# config
config = ConfigParser()
config.read(_Loc() + '/bck.cfg')

db = create_engine(config.get("DATABASE", "SQLALCHEMY_DATABASE_URI"))
config.get("DATABASE", "SQLALCHEMY_TRACK_MODIFICATIONS")

# database
# create all:
#   execute python term
#   from database import Base
#   Base.metadata.create_all()
mymetadata = MetaData(db)
Base = declarative_base(metadata=mymetadata)
Session = sessionmaker(bind=db)
session = Session()
def write_config(section, option, value):
    '''
    This function will write the configuration to the file.
    '''
    filename = HOME + CONFIG_FILE
    config = ConfigParser()
    config.read(filename)
    if not config.has_section(section):
        config.add_section(section)
    with open(filename, "w") as file:
        config.set(section, option, value)
        config.write(file)
    return section, option, value
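# Hypothetical usage sketch: persist one option, creating its section on
# first use. Section/option/value names are illustrative; HOME and
# CONFIG_FILE are assumed to be defined elsewhere in the module.
write_config('ui', 'colour_scheme', 'dark')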
    import builtins
else:
    import __builtin__ as builtins
builtins._ASTROPY_SETUP_ = True

from astropy_helpers.setup_helpers import (register_commands, adjust_compiler,
                                           get_debug_option, get_package_info)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py

# Get some values from the setup.cfg
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))

PACKAGENAME = metadata.get('package_name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')

# Get the long description from the package's docstring
__import__(PACKAGENAME)
package = sys.modules[PACKAGENAME]
LONG_DESCRIPTION = package.__doc__
def preprocess(args):
    # Initialize table
    if args.table is not None:
        fmt = '%s\t' * len(args.tableinfo)
        args.table.write(fmt % tuple(args.tableinfo))

    # Open configuration file
    if args.config is not None:
        cfg = ConfigParser({'flagchans': None, 'levels': None,
                            'levelmode': args.levelmode})
        cfg.read(args.config)
        if cfg.has_section('afoli'):
            args.flagchans = cfg.get('afoli', 'flagchans')
            args.levels = cfg.get('afoli', 'levels')
            args.levelmode = cfg.get('afoli', 'level_mode')

    # Get spectrum
    try:
        # If cube is loaded
        logger.info('Image shape: %r', args.cube.data.shape)
        assert args.cube.data.ndim == 4
        assert args.cube.data.shape[0] == 1

        # Find peak
        if args.peak is not None:
            # User value
            logger.info('Using input peak position')
            xmax, ymax = args.peak
        else:
            xmax, ymax = find_peak(cube=args.cube, rms=args.rms)

        # Write table
        if args.table is not None:
            args.table.write('%5i\t%5i\t' % (xmax, ymax))

        # Get spectrum at peak
        if args.beam_avg:
            # Beam size
            logger.info('Averaging over beam')
            pixsize = np.sqrt(np.abs(args.cube.header['CDELT1'] *
                                     args.cube.header['CDELT2']))
            if args.beam_fwhm:
                beam_fwhm = args.beam_fwhm[0] / 3600.
            else:
                beam_fwhm = np.sqrt(args.cube.header['BMIN'] *
                                    args.cube.header['BMAJ'])
            if args.beam_size:
                beam_sigma = args.beam_size[0] / 3600.
            else:
                beam_sigma = beam_fwhm / (2. * (2. * np.log(2))**0.5)
            beam_sigma = beam_sigma / pixsize
            logger.info('Beam size (sigma) = %f pix', beam_sigma)

            # Filter data
            Y, X = np.indices(args.cube.data.shape[2:])
            dist = np.sqrt((X - xmax)**2. + (Y - ymax)**2.)
            mask = dist <= beam_sigma
            masked = np.ma.array(
                args.cube.data[0, :, :, :],
                mask=np.tile(~mask, (args.cube.data[0, :, :, :].shape[0], 1)))
            args.spectrum = np.sum(masked, axis=(1, 2)) / np.sum(mask)
        else:
            logger.info('Using peak spectra')
            args.spectrum = args.cube.data[0, :, ymax, xmax]
        logger.info('Number of channels: %i', len(args.spectrum))

        if args.specname:
            with open(os.path.expanduser(args.specname), 'w') as out:
                out.write('\n'.join(['%f %f' % fnu for fnu in
                                     enumerate(args.spectrum)]))

        ## Off source reference spectrum
        #if args.ref_pix is not None:
        #    args.ref_spec = np.ma.array(args.cube.data[0,:,
        #        args.ref_pix[1],args.ref_pix[0]], mask=False)
        #    logger.info('Reference pixel mean: %f', np.mean(args.ref_spec))
    except AttributeError:
        # If spectrum is loaded from file
        logger.info('Spectrum shape: %r', args.spec.shape)
        if len(args.spec.shape) > 1:
            logger.info('Selecting second column')
            args.spectrum = args.spec[:, 1]
        else:
            args.spectrum = args.spec

        # Write table
        if args.table is not None:
            args.table.write('%5s\t%5s\t' % ('--', '--'))
    from wsgiref.simple_server import make_server
    global port
    #wsgi_app = WsgiApplication(application)
    wsgi_app = WsgiMounter({'DNSWebService': application})
    server = make_server('0.0.0.0', port, wsgi_app)
    server.serve_forever()
    pass

parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', type=str, default='/etc/drmsd.ini',
                    help='config file')
args = parser.parse_args()

config = ConfigParser()
config.read(args.config)
port = config.getint('network', 'port')
ackhost = config.get('network', 'ackhost')
ackport = config.getint('network', 'ackport')

try:
    gPwd = config.get('security', 'secret')
except (NoSectionError, NoOptionError):
    print('config file "%s" missing "security.secret" option' % args.config,
          file=sys.stderr, flush=True)
    sys.exit(1)
pass

try:
    gAESKey = config.get('security', 'aes_key')
except (NoSectionError, NoOptionError):
def _get_config(filename):
    if not os.path.isfile(filename):
        raise IOError("Configuration file not found")
    config = ConfigParser()
    config.read(filename)
    return config
else:
    import __builtin__ as builtins
builtins._ASTROPY_SETUP_ = True

from astropy_helpers.setup_helpers import (register_commands,
                                           get_debug_option,
                                           get_package_info)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py

# Get some values from the setup.cfg
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))

PACKAGENAME = metadata.get('package_name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')

# Get the long description from the package's docstring
__import__(PACKAGENAME)
package = sys.modules[PACKAGENAME]
LONG_DESCRIPTION = package.__doc__
from ConfigParser import ConfigParser
import sys

if sys.DEV_MODE:
    conf_path = "james.conf"
else:
    conf_path = "/etc/james/james.conf"

config = ConfigParser()
config.read(conf_path)


def save_config():
    # Use a with block so the file is flushed and closed promptly.
    with open(conf_path, "w") as f:
        config.write(f)
def convert_manifest(json_manifest):
    assert len(json_manifest["files"]) == 1
    assert json_manifest["files"][0]["compressor"] == "gzip"

    parser = ConfigParser()
    parser.add_section("snapshot")
    parser.add_section("chunks")
    parser.set("snapshot", "description", json_manifest["description"])
    parser.set("snapshot", "created_at", json_manifest["created_at"])
    parser.set("snapshot", "pack_method",
               json_manifest["files"][0]["compressor"])
    for chunk, md5sum, size in reversed(json_manifest["files"][0]["chunks"]):
        parser.set("chunks", chunk, md5sum)
    LOG.debug("CONVERT: %s", parser.items("chunks"))
    return parser
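# Illustrative only: the smallest manifest shape convert_manifest()
# accepts -- one gzip-compressed file whose chunk list becomes the
# [chunks] section. All values below are hypothetical.
json_manifest = {
    "description": "nightly snapshot",
    "created_at": "2015-01-01 00:00:00",
    "files": [{
        "compressor": "gzip",
        "chunks": [("000000", "d41d8cd98f00b204e9800998ecf8427e", 1024)],
    }],
}
parser = convert_manifest(json_manifest)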
    '%(asctime)s - %(levelname)s - %(funcName)s() %(lineno)d:\t %(message)s',
    level=logging.INFO)
log.startLogging(sys.stdout)

import argparse
parser = argparse.ArgumentParser(
    description="Run sputnik sample marketmaker client")
parser.add_argument("-c", "--config", dest="config", action="store",
                    help="Configuration file", default="client.ini")
parser.add_argument("--debug", dest="debug", action="store_true",
                    help="Enable debugging output", default=False)
args = parser.parse_args()

config = ConfigParser()
config.read(args.config)
params = dict(config.items("sputnik"))
params.update(dict(config.items("market_maker")))

sputnik = Sputnik(debug=args.debug, bot=MarketMakerBot, **params)
sputnik.on("disconnect", lambda x: reactor.stop())
sputnik.connect()

reactor.run()
    formatstr = FORMAT26

log = logging.getLogger('autopyfactory')
hdlr = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter(FORMAT23)
hdlr.setFormatter(formatter)
log.addHandler(hdlr)

if debug:
    log.setLevel(logging.DEBUG)  # Override with command line switches
if info:
    log.setLevel(logging.INFO)   # Override with command line switches
log.debug("Logging initialized.")

# Read in config file
aconfig = ConfigParser()
if not aconfig_file:
    aconfig_file = os.path.expanduser(default_configfile)
else:
    aconfig_file = os.path.expanduser(aconfig_file)
got_config = aconfig.read(aconfig_file)
log.debug("Read config file %s, return value: %s" % (aconfig_file,
                                                     got_config))

am = AuthManager(aconfig)
am.reconfig(aconfig)

log.info("Authmanager created. Starting handlers...")
am.startHandlers()
#am.start()

try:
    if options.interface == "127.0.0.1":
        options.interface = "::1"
    if options.nameservers == "8.8.8.8":
        options.nameservers = "2001:4860:4860::8888"

print " | DNSChef started on interface: %s " % options.interface

# Use alternative DNS servers
if options.nameservers:
    nameservers = options.nameservers.split(',')
    print " | Using the following nameservers: %s" % ", ".join(nameservers)

# External file definitions
if options.file:
    config = ConfigParser()
    config.read(options.file)
    for section in config.sections():
        if section in nametodns:
            for domain, record in config.items(section):
                # Make domain case insensitive
                domain = domain.lower()
                nametodns[section][domain] = record
                print " [+] Cooking %s replies for domain %s with '%s'" % (section, domain, record)
        else:
            print " [!] DNS Record '%s' is not supported. Ignoring section contents." % section

# DNS Record and Domain Name definitions
        return None
    return [i.strip() for i in string.strip().split(',')]


### Config file values ###

# read the config file
if not exists(CONFIG_FILE_PATH) and not exists(CONFIG_DEFAULTS_FILE_PATH):
    print "Unable to find any config settings! Please create one of these two files:"
    print "", CONFIG_FILE_PATH
    print "", CONFIG_DEFAULTS_FILE_PATH
    exit(1)

if not exists(CONFIG_FILE_PATH):
    copyfile(CONFIG_DEFAULTS_FILE_PATH, CONFIG_FILE_PATH)

_config = ConfigParser()
_config.read(CONFIG_FILE_PATH)

# Extract the config values - reference these in calling code
# NOTE: keys from config files are forced to lower-case when they are
# read by ConfigParser

# which extractor backend to use for loading data
dataExtractor = _configSection('DataExtractor')
# set DEFAULT value if necessary
dataExtractor['dataSource'] = dataExtractor.get('datasource', 'DEFAULT').lower()

# baseURL, repository, user, password
archivesSpace = _configSection('ArchivesSpace')
if archivesSpace:
    archivesSpace['repository_url'] = '%s/repositories/%s' % (
def __init__(self, txt):
    sfp = StringIO.StringIO(txt)
    c = ConfigParser()
    c.readfp(sfp)
    self.c = c
    self.d = self.full(c)
#!/usr/bin/python
# -*- coding:utf-8 -*-
import sys, json

sys.path.append('../')
from src.com.dis.client import disclient
from src.com.dis.models.base_model import IS_PYTHON2
if IS_PYTHON2:
    from ConfigParser import ConfigParser
else:
    from configparser import ConfigParser

fp = '../conf.ini'
conf = ConfigParser()
conf.read(fp)

# Use configuration file
try:
    projectid = conf.get('Section1', 'projectid')
    ak = conf.get('Section1', 'ak')
    sk = conf.get('Section1', 'sk')
    region = conf.get('Section1', 'region')
    endpoint = conf.get('Section1', 'endpoint')
except Exception as ex:
    print(str(ex))

''' Enter the following information '''
# projectid = "your projectid"
# endpoint = " "
# ak = "*** Provide your Access Key ***"
    unichr(135): unichr(8225),  # double dagger
    unichr(136): unichr(710),   # modifier letter circumflex accent
    unichr(137): unichr(8240),  # per mille sign
    unichr(138): unichr(352),   # latin capital letter s with caron
    unichr(139): unichr(8249),  # single left-pointing angle quotation mark
    unichr(140): unichr(338),   # latin capital ligature oe
    unichr(142): unichr(381),   # latin capital letter z with caron
    unichr(145): unichr(8216),  # left single quotation mark
    unichr(146): unichr(8217),  # right single quotation mark
    unichr(147): unichr(8220),  # left double quotation mark
    unichr(148): unichr(8221),  # right double quotation mark
    unichr(149): unichr(8226),  # bullet
    unichr(150): unichr(8211),  # en dash
    unichr(151): unichr(8212),  # em dash
    unichr(152): unichr(732),   # small tilde
    unichr(153): unichr(8482),  # trade mark sign
    unichr(154): unichr(353),   # latin small letter s with caron
    unichr(155): unichr(8250),  # single right-pointing angle quotation mark
    unichr(156): unichr(339),   # latin small ligature oe
    unichr(158): unichr(382),   # latin small letter z with caron
    unichr(159): unichr(376)}   # latin capital letter y with diaeresis

if __name__ == "__main__":
    # small main program which converts OPML into config.ini format
    import sys, urllib
    config = ConfigParser()
    for opml in sys.argv[1:]:
        opml2config(urllib.urlopen(opml), config)
    config.write(sys.stdout)
log_file_path = 'log.txt'


#vvvvvvvvvv StrNow() functions vvvvvvvvvv#
def StrNow():
    return strftime("[%Y-%m-%d_%H-%M-%S] ", localtime())


#vvvvvvvvvv UpdateMsg() functions vvvvvvvvvv#
def UpdateMsg(Msg_str):
    print StrNow() + ' ' + Msg_str
    open(log_file_path, 'a').write(StrNow() + Msg_str + '\n')
    return


Config = ConfigParser()
Config.read(config_file_path)
Work_DB_Path = Config.get('Initial_Setting', 'Work_DB_Path')
Data_DB_Path = Config.get('Initial_Setting', 'Data_DB_Path')
Crawler_Work_Queue_Size = Config.getint('Initial_Setting',
                                        'Crawler_Work_Queue_Size')
Crawler_Data_Queue_Size = Config.getint('Initial_Setting',
                                        'Crawler_Data_Queue_Size')
Worker_Num = Config.getint('Initial_Setting', 'Worker_Num')

Crawler_Work_Queue = Queue(Crawler_Work_Queue_Size)
Crawler_Data_Queue = Queue(Crawler_Data_Queue_Size)
""" parser = argparse.ArgumentParser( description = description, version = __version__ ) parser.add_argument("inifile", help="The configuration (.ini) file") # parse input options opts = parser.parse_args() inifile = opts.inifile startcron = False # variable to say whether to create the crontab (if this is the first time the script is run then this will be changed to True later) cronid = 'knopeJob' # an ID for the crontab job croncommand = '%s -i {0}' % sys.argv[0] # set the cron command (which will re-run this script) # open and parse config file cp = ConfigParser() try: cp.read(inifile) except: print("Error... cannot parse configuration file '%s'" % inifile, file=sys.stderr) sys.exit(1) # if configuration file has previous_endtimes option then the cronjob must have started if not cp.has_option('times', 'previous_endtimes'): # make sure to start the crontab job startcron = True # open and parse the run configuation file if cp.has_option('configuration', 'file'): runconfig = cp.get('configuration', 'file') # Get main configuration ini template for the run if not os.path.isfile(runconfig):