def read_metadata(filename):
    """Return (time_stamp, exposure, summed_exposures) for an image file.

    Tries, in order: an embedded CBF header (for '.cbf' files), a sidecar
    '<name>.metadata' INI file, and finally falls back to
    (current time, 1.0, 1) when no metadata can be found.
    """
    # Strip a trailing bz2 extension so the checks below use the inner name.
    # NOTE(review): endswith('bz2') also matches names like 'foobz2' --
    # presumably '.bz2' was intended; confirm against callers.
    if filename.endswith('bz2'):
        fname = os.path.splitext(filename)[0]
    else:
        fname = filename
    if os.path.splitext(fname)[1] == '.cbf':
        try:
            import pycbf
        except ImportError:
            raise NeXusError('Reading CBF files requires the pycbf module')
        cbf = pycbf.cbf_handle_struct()
        cbf.read_file(fname, pycbf.MSG_DIGEST)
        cbf.select_datablock(0)
        cbf.select_category(0)
        cbf.select_column(1)
        meta_text = cbf.get_value().splitlines()
        # Header line 2 carries the acquisition date after a 2-char prefix;
        # 'epoch' (defined elsewhere in this module) converts it to seconds.
        date_string = meta_text[2][2:]
        time_stamp = epoch(date_string)
        # Header line 5, third whitespace token, is the exposure time.
        exposure = float(meta_text[5].split()[2])
        summed_exposures = 1
        return time_stamp, exposure, summed_exposures
    elif os.path.exists(fname+'.metadata'):
        # Sidecar INI file written by the acquisition software.
        parser = ConfigParser()
        parser.read(fname+'.metadata')
        return (parser.getfloat('metadata', 'timeStamp'),
                parser.getfloat('metadata', 'exposureTime'),
                parser.getint('metadata', 'summedExposures'))
    else:
        # No metadata available: current time, nominal 1s exposure, one frame.
        return time.time(), 1.0, 1
class SecretSanta(object):
    """Match people read from a 'peopleconf' INI file into giver->receiver
    pairs, honouring per-person constraints (via SecretSantaPerson.matchok)."""

    def __init__(self, config):
        # config: ConfigParser-like object with [global] peopleconf = <path>.
        log.debug("Constructing SecretSanta main object.")
        self.people = []
        self.givers = []
        self.receivers = []
        self.pcfile = config.get('global','peopleconf')
        log.debug("Peopleconf is %s" % self.pcfile)
        self.pc=ConfigParser()
        self.pc.read(self.pcfile)
        # One SecretSantaPerson per section of the people config.
        for sect in self.pc.sections():
            p = SecretSantaPerson(sect, self.pc)
            self.people.append(p)
        # NOTE(review): self.givers is always empty at this point, so this
        # loop is a no-op and self.receivers stays empty -- possibly
        # self.people was intended. Confirm before relying on receivers.
        for p in self.givers:
            self.receivers.append(p)

    def matchall(self):
        ''' Perform matching for all people with constraints. '''
        rlist = []
        for p in self.people:
            rlist.append(p)
        shuffle(rlist)
        log.debug("Performing matching...")
        for p in self.people:
            r = rlist.pop()
            # Draw again (re-shuffling) until the candidate satisfies p's
            # constraints. NOTE(review): loops forever if every remaining
            # candidate fails p.matchok() -- no retry limit.
            while not p.matchok(r):
                rlist.append(r)
                shuffle(rlist)
                r = rlist.pop()
            p.receiver = r
            log.debug("%s -> %s\n" % (p.name, p.receiver.name))

    def list(self):
        ''' Return string representation of all people in config. '''
        log.debug("List all users...")
        s = ""
        for p in self.people:
            s+= str(p)
        return s

    def giverslist(self):
        ''' Return string in form of:
        Joe Bloe -> Mary Margaret
        Mary Margaret -> Susan Strong
        Susan Strong -> Joe Bloe
        '''
        s = ""
        for p in self.people:
            s+= "%s -> %s\n" % ( p.name, p.receiver.name)
        return s
def init():
    """Bootstrap a Galaxy runtime from the config file named in *options*.

    Returns (model_mapping, object_store, db_scheme) where db_scheme is the
    leading part of database_connection (e.g. 'sqlite' or 'postgres').
    """
    options.config = os.path.abspath( options.config )
    # 'all' acts as "no filter" for the username/email selections.
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    # Run from the Galaxy root so relative paths in the ini file resolve.
    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )
    from galaxy import eggs
    import pkg_resources
    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config
    # lazy
    globals()['nice_size'] = __import__( 'galaxy.util', globals(), locals(), ( 'nice_size', ) ).nice_size
    # Defaults mirror Galaxy's own fallbacks when the ini omits these keys.
    config_parser = ConfigParser( dict( here = os.getcwd(),
                                        database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( os.path.basename( options.config ) )
    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value
    config = galaxy.config.Configuration( **config_dict )
    object_store = build_object_store_from_config( config )
    from galaxy.model import mapping
    return mapping.init( config.file_path, config.database_connection, create_tables = False, object_store = object_store ), object_store, config.database_connection.split(':')[0]
def main(numthreads=10):
    """Download the authenticated user's VK audio items using *numthreads*
    worker threads, printing the total elapsed time."""
    t1 = time.time()
    queue = Queue()
    factory = TokenFactory()
    # API credentials come from the [api] section of vk_api.conf.
    config = ConfigParser()
    config.read('vk_api.conf')
    url = API.get_url(
        app_id=config.get('api', 'id'), app_key=config.get('api', 'key'),
        permissions=PERMISSIONS, redirect_uri=URI, display=DISPLAY,
        api_version=VERSION)
    # TODO: check token expiration
    token_pair = factory.get_token_pair()
    if not token_pair:
        token_pair = factory.store_token_pair(url)
    api = API(token=token_pair[0],user_id=token_pair[1])
    audio = api.audio
    # NOTE(review): 'audio.get' is not called here -- presumably the API
    # wrapper exposes it as a property/descriptor; otherwise this binds the
    # method object and the subscripts below would fail. Confirm.
    data = audio.get
    if data:
        for item in data['response']['items']:
            queue.put(item)
    # Workers consume the queue; queue.join() blocks until all items done.
    for i in range(numthreads):
        t = DownloadThread(queue, FILE_DIR)
        t.start()
    queue.join()
    t2 = time.time()
    print('Time: {0}'.format(t2-t1))
class ConfigLoader():
    """Load and sanity-check the application's INI configuration.

    Python 2 code (uses 'except Exception, err'). Exits the process on any
    configuration problem.
    """

    def __init__(self):
        # Probe that the config file is readable before parsing it.
        try:
            open(cfg_name, 'r').close()
        except:
            # Fatal: cannot read the configuration file -- exit.
            sys.stderr.write(u'严重错误,无法读取配置文件!程序自动退出。\n')
            exit(-1)
        self.config = ConfigParser()
        self.config.read(cfg_name)
        # All [system] options exposed as a plain dict.
        self.system = dict(self.config.items('system'))
        self.config_check()

    def config_check(self):
        """Validate the storage directory and port settings; exit on failure."""
        # NOTE(review): self.read is not defined in this snippet -- presumably
        # a lookup helper defined elsewhere in the class.
        try:
            # First check that the configured global storage path is valid.
            check_path(self.read('global_pos'))
            # Then make sure the admin ('root') download directory exists.
            root_path = os.path.join(self.read('global_pos'), 'root')
            if not os.path.exists(root_path):
                os.mkdir(root_path)
        except Exception, err:
            sys.stderr.write(u'系统错误,原因:%s\n' % err)
            exit(-1)
        # Finally verify the configured port is not already in use.
        if check_port(self.read('port_name')):
            sys.stderr.write(u'系统错误,端口被占用!\n')
            exit(-1)
def __init__(self, fn):
    """Read the INI file *fn* and store every mongo/storage URL it defines
    as a private attribute on this object."""
    parser = ConfigParser()
    parser.read(fn)
    # (attribute name, section, option) triples -- one per stored URL.
    url_spec = (
        ('_page_store_mongodb_url', 'page_store', 'mongo_db_url'),
        ('_cv_page_store_mongodb_url', 'cv_page_store', 'mongo_db_url'),
        ('_jd_51job_raw_store_mongo_url', 'jd_51job_parse_measure', 'jd_raw_store_mongo_url'),
        ('_jd_51job_measure_store_mongo_url', 'jd_51job_parse_measure', 'jd_measure_store_mongo_url'),
        ('_cv_51job_raw_store_mongo_url', 'cv_51job_parse_measure', 'cv_raw_store_mongo_url'),
        ('_cv_51job_measure_store_mongo_url', 'cv_51job_parse_measure', 'cv_measure_store_mongo_url'),
        ('_jd_zhilian_raw_store_mongo_url', 'jd_zhilian_parse_measure', 'jd_raw_store_mongo_url'),
        ('_jd_zhilian_measure_store_mongo_url', 'jd_zhilian_parse_measure', 'jd_measure_store_mongo_url'),
        ('_cv_zhilian_raw_store_mongo_url', 'cv_zhilian_parse_measure', 'cv_raw_store_mongo_url'),
        ('_cv_zhilian_measure_store_mongo_url', 'cv_zhilian_parse_measure', 'cv_measure_store_mongo_url'),
        ('_cv_liepin_raw_store_mongo_url', 'cv_liepin_parse_measure', 'cv_raw_store_mongo_url'),
        ('_cv_liepin_measure_store_mongo_url', 'cv_liepin_parse_measure', 'cv_measure_store_mongo_url'),
        ('_cv_hash_table', 'cv_hash_table', 'mongo_db_url'),
        ('_jd_lagou_raw_store_mongo_url', 'jd_lagou_parse_measure', 'jd_raw_store_mongo_url'),
        ('_jd_lagou_measure_store_mongo_url', 'jd_lagou_parse_measure', 'jd_measure_store_mongo_url'),
        ('_zhineng_salary5_charts_mongo_url', 'zhineng', 'zhineng_salary5_charts_mongodb_url'),
        ('_inc_stats2_mongo_url', 'zhineng', 'inc_stats2_mongo_url'),
        ('_2c_ipin_mongo_url', '2c_ipin', 'mongo_db_url'),
    )
    for attribute, section, option in url_spec:
        setattr(self, attribute, parser.get(section, option))
def getDataSources(fName = None):
    ''' return data sources directories for this machine.
    directories are defined in datasources.ini or provided filepath

    The file must contain a section named after this machine's hostname;
    each option in that section maps a source name to a directory.

    Raises NameError if no section matches the hostname.
    '''
    import socket
    # Py2/Py3 compatible import: the module was renamed to 'configparser'
    # in Python 3 (the original Py2-only import broke under Python 3).
    try:
        from configparser import ConfigParser
    except ImportError:
        from ConfigParser import ConfigParser
    pcName = socket.gethostname()
    p = ConfigParser()
    # Keep option names case-sensitive (default optionxform lowercases).
    p.optionxform = str
    if fName is None:
        fName = 'datasources.ini'
    p.read(fName)
    if pcName not in p.sections():
        raise NameError('Host name section %s not found in file %s' %(pcName,fName))
    dataSources = {}
    for option in p.options(pcName):
        dataSources[option] = p.get(pcName,option)
    return dataSources
def sync():
    """Copy selected properties from galaxy.ini into reports.ini.

    argv[1] / argv[2] may override the reports and galaxy config paths.
    Properties already present in reports.ini are replaced in place (via
    get_synced_line); REPLACE_PROPERTIES present only in galaxy.ini are
    appended at the end.
    """
    reports_config_file = "config/reports.ini"
    if len(argv) > 1:
        reports_config_file = argv[1]
    universe_config_file = "config/galaxy.ini"
    if len(argv) > 2:
        universe_config_file = argv[2]
    parser = ConfigParser()
    parser.read(universe_config_file)
    # Snapshot the current reports config before rewriting it in place.
    with open(reports_config_file, "r") as f:
        reports_config_lines = f.readlines()
    replaced_properties = set([])
    with open(reports_config_file, "w") as f:
        # Write all properties from reports config replacing as
        # needed.
        for reports_config_line in reports_config_lines:
            (line, replaced_property) = get_synced_line(reports_config_line, parser)
            if replaced_property:
                replaced_properties.add(replaced_property)
            f.write(line)
        # If any properties appear in universe config and not in
        # reports write these as well.
        for replacement_property in REPLACE_PROPERTIES:
            if parser.has_option(MAIN_SECTION, replacement_property) and \
                    not (replacement_property in replaced_properties):
                f.write(get_universe_line(replacement_property, parser))
class Config(object):
    """Singleton wrapper around the bot's INI configuration file.

    Attribute access (cfg.irc, cfg.db, ...) maps to named sections and
    returns a _Section, which is empty when the section is missing.
    """

    def __new__(cls, *args, **kwargs):
        # Classic singleton: create the one instance on first call only.
        if '_the_instance' not in cls.__dict__:
            cls._the_instance = object.__new__(cls)
        return cls._the_instance

    def __init__(self, filename = None):
        # Re-running __init__ without a filename leaves prior state intact.
        if filename != None:
            self.filename = filename
            self.config = ConfigParser()
            self.config.read(self.filename)

    def get_section(self,name):
        """Return a _Section for *name*; empty items when it is absent."""
        items = self.config.items(name) if self.config.has_section(name) else []
        return _Section(name, items, self)

    def __getattr__(self, attr):
        # Shortcut attribute -> INI section name dispatch table.
        section_names = {
            'irc': 'IRC',
            'ldap': 'LDAP',
            'rpc': 'RPC',
            'bot': 'Bot',
            'smtp': 'SMTP',
            'db': 'Database',
            'identica': 'Identi.ca',
        }
        if attr in section_names:
            return self.get_section(section_names[attr])
        raise AttributeError("No section '%s' in Config." % attr)
def version(self, extended=False):
    """
    What version is cobbler?

    If extended == False, returns a float for backwards compatibility
    If extended == True, returns a dict:

    gitstamp      -- the last git commit hash
    gitdate       -- the last git commit date on the builder machine
    builddate     -- the time of the build
    version       -- something like "1.3.2"
    version_tuple -- something like [ 1, 3, 2 ]
    """
    parser = ConfigParser()
    parser.read("/etc/cobbler/version")
    data = {
        "gitdate": parser.get("cobbler", "gitdate"),
        "gitstamp": parser.get("cobbler", "gitstamp"),
        "builddate": parser.get("cobbler", "builddate"),
        "version": parser.get("cobbler", "version"),
        # dont actually read the version_tuple from the version file
        "version_tuple": [],
    }
    for part in data["version"].split("."):
        data["version_tuple"].append(int(part))
    if extended:
        return data
    # for backwards compatibility and use with koan's comparisons:
    # encode "a.b.c" as a + b/10 + c/1000.
    parts = data["version_tuple"]
    return parts[0] + 0.1 * parts[1] + 0.001 * parts[2]
class Config(object): def __init__(self): self.parser = ConfigParser() self.read_configuration() def read_configuration(self): # check if we are in svn working dir if not os.path.isdir('.svn'): raise ExsvnError("Current directory is not a svn working directory") fullcfgname = os.path.join('.svn', CONFIG_FILENAME) if not os.path.exists(fullcfgname): self.create_configuration(fullcfgname) self.parser.read(fullcfgname) def create_configuration(self, fname): """Create new configuration file""" print "Creating default configuration in %s" % fname cfg = self.get_default_configuration() f = file(fname, "w") f.write(cfg) f.close() # protect from others os.chmod(fname, 0600) def get_default_configuration(self): return """
def get_mapping_fields(self, cr, uid, shop_id, context=None, reverse=False):
    """Load this model's field mapping from the shop's prestashop .conf file.

    Returns the evaluated 'type_mapping' list on success, or the tuple
    ({}, False) when the config path is missing/invalid or the model has no
    section. NOTE(review): these two return shapes are inconsistent --
    callers must handle both.
    """
    result = {}
    if not context:
        context = {}
    shop_pool = self.pool.get('sale.shop')
    shop_data = shop_pool.read(cr, uid, shop_id, ['prestashop_config_path'])
    # Bail out unless a *.conf path is configured and a connection exists.
    if not shop_data['prestashop_config_path'] or \
        not shop_data['prestashop_config_path'].endswith(".conf") or\
        not self._prestashop:
        return result,False
    config = ConfigParser()
    config.read(shop_data['prestashop_config_path'])
    # Sections are keyed by the OpenERP model name (self._name).
    if not self._name in config.sections():
        return result,False
    mapping = dict(config.items(self._name))
    # SECURITY(review): eval() of config-file content executes arbitrary
    # code; consider ast.literal_eval if the file is not fully trusted.
    return eval(mapping.get('type_mapping',"[]"))
def getMysqlConfig(self, db = 'test'):
    """获取mysql连接配置 - 依赖配置文件[conf/config.ini],节点[db]

    Build a MySQL connection dict from the [db] section of conf/config.ini;
    the database name itself comes from the *db* argument.

    Returns:
        dbconfig dict.
    """
    try:
        parser = ConfigParser()
        parser.read('conf/config.ini')
        return {
            'host': parser.get('db', 'host'),
            'port': parser.getint('db', 'port'),
            'user': parser.get('db', 'user'),
            'passwd': parser.get('db', 'passwd'),
            'db': db,
        }
    except Exception as e:
        # Log, print and re-raise so the caller sees the failure too.
        error = """Can't load config from [conf/config.ini] or [db] node doesn't exist.\n Please make sure this file."""
        logging.warning(error)
        print(error)
        raise Exception(e)
def _load_object_post_as_copy_conf(self, conf):
    """Inherit 'object_post_as_copy' from the proxy-server app section of the
    paste config when the middleware section does not set it itself."""
    if ('object_post_as_copy' in conf or '__file__' not in conf):
        # Option is explicitly set in middleware conf. In that case,
        # we assume operator knows what he's doing.
        # This takes preference over the one set in proxy app
        return
    cp = ConfigParser()
    # '__file__' may point at a conf.d directory rather than a single file.
    if os.path.isdir(conf['__file__']):
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])
    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return
    # The proxy app is the last whitespace-separated entry of the pipeline.
    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    try:
        conf['object_post_as_copy'] = cp.get(proxy_section, 'object_post_as_copy')
    except (NoSectionError, NoOptionError):
        # Proxy app does not define it either: leave conf unchanged.
        pass
def process_config(directory, config_file, sp):
    """Validate each section of *config_file* against *sp*'s header data and
    fix up the data files the sections describe.

    Invalid sections get their data file deleted; valid sections get the
    configured header line prepended. The config file itself is removed at
    the end. Returns None when everything was valid, "error" for an unknown
    db/element format, otherwise {"invalid_sections": [...]}.
    """
    config = ConfigParser()
    config.read(config_file)
    sections = config.sections()
    # Decide which header schema applies to this config.
    db_or_element = db_or_element_format(sp, sections)
    if db_or_element == "db":
        invalid_sections = fc.invalid_config_sections(directory, config_file, sp.full_header_data("db"))
    elif db_or_element == "element":
        invalid_sections = fc.invalid_config_sections(directory, config_file, sp.full_header_data("element"))
    else:
        return "error"
    for s in sections:
        fname = config.get(s, "file_name")
        header = config.get(s, "header")
        if s in invalid_sections:
            # Invalid section: drop its data file entirely.
            if os.path.exists(directory + fname):
                os.remove(directory + fname)
        else:
            # Valid section: prepend the header via a temp file, then swap.
            with open(directory + s + "_temp.txt", "w") as w:
                w.write(header + "\n")
                with open(directory + fname, "r") as r:
                    for line in r:
                        w.write(line)
            os.remove(directory + fname)
            os.rename(directory + s + "_temp.txt", directory + fname)
    os.remove(config_file)
    if len(invalid_sections) == 0:
        return None
    return {"invalid_sections":invalid_sections}
class Config(object):
    """Configuration for the podcast client: validates required options in
    the [config] section and exposes them (plus the [podcast] feed map) as
    attributes."""

    # Options fetched without interpolation (raw mode).
    _raw_options = ('fetch_command', 'player_command')
    # Options fetched with normal interpolation.
    _options = ('media_dir',)
    # Options run through os.path.expanduser on access.
    _expanduser = ('media_dir',)

    def __init__(self, my_file):
        my_file = os.path.expanduser(my_file)
        if not os.path.exists(my_file):
            # Seed a template config (module-level 'config_file' text) and
            # tell the user to fill it in.
            with codecs.open(my_file, 'w', encoding='utf-8') as fp:
                fp.write(config_file)
            raise MarrieError(
                'Missing config file: %s. It will be created for you.' % my_file)
        self._cp = ConfigParser()
        self._cp.read(my_file)
        # Fail fast on any missing mandatory option.
        for opt in (self._raw_options + self._options):
            if not self._cp.has_option('config', opt):
                raise MarrieError('Missing needed config option: config:%s' \
                    % opt)

    def __getattr__(self, attr):
        """Resolve option attributes lazily from the parsed config."""
        opt = None
        if attr in self._raw_options:
            # NOTE(review): the third positional argument is Python 2's 'raw'
            # flag; on Python 3 ConfigParser.get makes 'raw' keyword-only, so
            # this call would need get('config', attr, raw=True). Confirm the
            # target interpreter.
            opt = self._cp.get('config', attr, True)
        elif attr in self._options:
            opt = self._cp.get('config', attr)
        elif attr == 'podcast':
            opt = OrderedDict(self._cp.items('podcast'))
        if opt is None:
            raise AttributeError(attr)
        if attr in self._expanduser and not isinstance(opt, dict):
            return os.path.expanduser(opt)
        return opt
def getConfig(self):
    """Return LDAP auth settings as a dict, preferring the Redmine
    auth_sources table and falling back to /etc/ldap/netcontrol.

    Python 2 code (uses the '<>' operator). The 'fonte_config' key records
    which source the settings came from.
    """
    self.cursor.execute("SELECT name, host, port, account, account_password, base_dn, attr_login, attr_firstname, attr_lastname, attr_mail, tls FROM auth_sources")
    self.results = self.cursor.fetchall()
    dic = {}
    if self.results <> ():
        # Map the 11 selected columns through self.indice to dict keys,
        # using only the first auth_sources row.
        for i in range(0,11):
            dic[ self.indice[ ( i ) ] ] = self.results[0][i]
        dic["fonte_config"] = "redmine"
        return dic
    else:
        # No Redmine source: pre-fill every key with "no record", then load
        # what we can from the netcontrol LDAP config file.
        for i in self.indice:
            dic[ i ] = "Sem regsitro"
        from ConfigParser import ConfigParser
        conf = ConfigParser()
        conf.read('/etc/ldap/netcontrol')
        dic["nomeServidor"] = conf.get('base','organization')
        dic["enderecoServidor"] = conf.get('base','server')
        dic["contaLDAP"] = "cn=admin,%s"%conf.get('base','bindDN')
        dic["baseDN"] = conf.get('base','bindDN')
        dic["senhaContLDAP"] = conf.get('base','adminPW')
        dic["email"] = conf.get('base','email')
        # Fixed defaults for the file-based fallback.
        dic["porta"] = "389"
        dic["usuario"] = "uid"
        dic["nomeUser"] = "******"
        dic["sobrenome"] = "sN"
        dic["fonte_config"] = "arquivo_ldap"
        del conf
        return dic
def startService(self):
    """Start the scheduler and submit one BuildSet per tree listed in the
    l10nbuilds ini file; self.waitOnTree fires when all builds finish."""
    BaseUpstreamScheduler.startService(self)
    log.msg("starting l10n scheduler")
    if self.inipath is None:
        # testing, don't trigger tree builds
        return
    # trigger tree builds for our trees, clear() first
    cp = ConfigParser()
    cp.read(self.inipath)
    self.trees.clear()
    _ds = []
    for tree in cp.sections():
        # create a BuildSet, submit it to the BuildMaster
        props = properties.Properties()
        props.update({
                'tree': tree,
                'l10nbuilds': self.inipath,
                }, "Scheduler")
        bs = buildset.BuildSet([self.treebuilder],
                               SourceStamp(),
                               properties=props)
        self.submitBuildSet(bs)
        _ds.append(bs.waitUntilFinished())
    # Fire onTreesBuilt once every submitted build set has completed.
    d = defer.DeferredList(_ds)
    d.addCallback(self.onTreesBuilt)
    self.waitOnTree = d
class UserConfig:
    """Persist a single 'diaspora' activation flag in a user INI file.

    Creates the file (deactivated) on first use; otherwise loads it and
    guarantees the [diaspora] section exists.
    """

    def __init__(self, filename=default_user_config_file):
        self.parser = ConfigParser()
        self.filename = filename
        if not isfile(self.filename):
            # Fresh install: create the file with activation disabled.
            # (set_activated writes the file out via __save.)
            self.parser.add_section('diaspora')
            self.set_activated(False)
        else:
            self.parser.read(self.filename)
            if not self.parser.has_section('diaspora'):
                self.parser.add_section('diaspora')

    def is_installed(self):
        """Return the stored activation flag as a bool.

        Raises if the 'activated' option has never been written.
        """
        return self.parser.getboolean('diaspora', 'activated')

    def set_activated(self, value):
        """Store the activation flag (stringified) and save the file."""
        self.parser.set('diaspora', 'activated', str(value))
        self.__save()

    def __save(self):
        # Text mode: ConfigParser.write() emits str, so the original 'wb'
        # mode crashed on Python 3 with a TypeError.
        with open(self.filename, 'w') as f:
            self.parser.write(f)
def config():
    """Read settings.ini and (re)populate the module-level playback options.

    Returns [lang, lang2, forcesub, forceusa, localizecookies, quality,
    onlymainsub]; also sets the video_format/resolution globals from the
    chosen quality preset.
    """
    global video_format, resolution, lang, lang2, forceusa, localizecookies
    parser = ConfigParser()
    parser.read('settings.ini')
    quality = parser.get('SETTINGS', 'video_quality')
    # quality preset -> (video_format, resolution) codes
    quality_codes = {'android': ['107', '71'],
                     '360p': ['106', '60'],
                     '480p': ['106', '61'],
                     '720p': ['106', '62'],
                     '1080p': ['108', '80'],
                     'highest': ['0', '0']}
    video_format, resolution = quality_codes[quality]
    lang = parser.get('SETTINGS', 'language')
    lang2 = parser.get('SETTINGS', 'language2')
    # config key -> display name shown to the user
    display_names = {'Espanol_Espana': u'Español (Espana)',
                     'Francais': u'Français (France)',
                     'Portugues': u'Português (Brasil)',
                     'English': u'English',
                     'Espanol': u'Español',
                     'Turkce': u'Türkçe',
                     'Italiano': u'Italiano',
                     'Arabic': u'العربية',
                     'Deutsch': u'Deutsch'}
    lang = display_names[lang]
    lang2 = display_names[lang2]
    forcesub = parser.getboolean('SETTINGS', 'forcesubtitle')
    forceusa = parser.getboolean('SETTINGS', 'forceusa')
    localizecookies = parser.getboolean('SETTINGS', 'localizecookies')
    onlymainsub = parser.getboolean('SETTINGS', 'onlymainsub')
    return [lang, lang2, forcesub, forceusa, localizecookies, quality, onlymainsub]
def auth_springpad_client(config_file=None):
    """
    Auth the application to make to be allowed to access to user's springpad
    account.

    If not a valid config file is provided it will prompt the user to visit
    an url in order to allow the application to access to its account and it
    will store authentication details in `config_file`.

    args:
        config_file: the configuration file path to be used. If it is not
            provided, '~/.springpad' will be used instead.

    returns:
        SpringpadClient instance to interact with the authenticated account.
    """
    config_file = config_file or os.path.expanduser("~/.springpad")
    config = ConfigParser()
    config.read([config_file])
    # Reuse a previously stored OAuth token when one is present.
    token = config.get("access", "token") if config.has_option("access", "token") else None
    if token:
        token = oauth.OAuthToken.from_string(token)
    client = SpringpadClient(SPRINGPAD_CONSUMER_KEY, SPRINGPAD_CONSUMER_PRIVATE, token)
    if not client.access_token:
        # First run (Python 2 code): interactive OAuth dance, then persist
        # the granted token back into the config file.
        req_token = client.get_request_token()
        print "Please grant access to your springpad account in the following url:\n"
        print "http://springpad.com/api/oauth-authorize?{0}\n".format(req_token)
        print "Once authorized, press intro to continue:",
        raw_input()
        client.access_token = client.get_access_token(req_token)
        config.add_section("access")
        config.set("access", "token", str(client.access_token))
        with open(os.path.expanduser(config_file), "w") as cf:
            config.write(cf)
    return client
def get_config_parameter(jobname, parameter_name, is_parameter_collection = False):
    """Detect export method of JOBNAME. Basically, parse JOBNAME.cfg
    and return export_method. Return None if problem found.

    When is_parameter_collection is true, return every value from the
    [export_job] section whose option name starts with parameter_name.
    """
    config_path = CFG_ETCDIR + os.sep + 'bibexport' + os.sep + jobname + '.cfg'
    if not os.path.exists(config_path):
        write_message("ERROR: cannot find config file %s." % config_path)
        return None
    job_config = ConfigParser()
    job_config.read(config_path)
    if not is_parameter_collection:
        # Single-valued parameter: direct lookup.
        return job_config.get('export_job', parameter_name)
    # Collection: gather every option sharing the requested prefix.
    return [value
            for name, value in job_config.items(section='export_job')
            if name.startswith(parameter_name)]
def restore_rois(self, roifile):
    """restore ROI setting from ROI.dat file

    Reads the [rois] section (entries formatted "name | lo hi"), rebuilds
    ROI objects for the first MCA and copies them -- with the first MCA's
    calibration -- to all remaining MCAs.
    """
    cp = ConfigParser()
    cp.read(roifile)
    rois = []
    self.mcas[0].clear_rois()
    prefix = self.mcas[0]._prefix
    # Normalise away a trailing '.' on the PV prefix.
    if prefix.endswith('.'):
        prefix = prefix[:-1]
    iroi = 0
    for a in cp.options('rois'):
        if a.lower().startswith('roi'):
            # Each entry looks like "name | lo hi".
            name, dat = cp.get('rois', a).split('|')
            lims = [int(i) for i in dat.split()]
            lo, hi = lims[0], lims[1]
            roi = ROI(prefix=prefix, roi=iroi)
            roi.left = lo
            roi.right = hi
            roi.name = name.strip()
            rois.append(roi)
            iroi += 1
    # Let channel-access callbacks settle before pushing the ROI list.
    epics.poll(0.050, 1.0)
    self.mcas[0].set_rois(rois)
    # Propagate the first MCA's calibration to the rest.
    cal0 = self.mcas[0].get_calib()
    for mca in self.mcas[1:]:
        mca.set_rois(rois, calib=cal0)
def test_import_local(self):
    """Importing a local repo should write its basename into the ini's
    unicore.content_repo_urls and register it in the yaml config."""
    self.tool.run(
        config=(self.yaml_config, self.config),
        verbose=True,
        clobber=False,
        repo_dir=self.working_dir,
        repo_url=self.workspace.working_dir,
        ini_config=self.ini_config,
        ini_section='app:main',
        update_config=True,
        repo_name=None,
        repo_host=None)
    # The ini should now point at the repo by directory basename.
    cp = ConfigParser()
    cp.read(self.ini_config)
    self.assertEqual(
        cp.get('app:main', 'unicore.content_repo_urls').strip(),
        os.path.basename(self.workspace.working_dir))
    # The yaml config should map the parsed repo name to its path.
    with open(self.yaml_config, 'r') as fp:
        data = yaml.safe_load(fp)
    repo_name = parse_repo_name(self.workspace.working_dir)
    self.assertEqual(data['repositories'], {
        repo_name: self.workspace.working_dir
    })
def test_import_remote(self):
    """Importing against a remote repo host should store the Location URL
    returned by the host in the ini, and the local path in the yaml."""
    repo_name = parse_repo_name(self.workspace.working_dir)
    repo_location = 'http://localhost:8080/repos/%s.json' % repo_name
    # Stub the repo-creation endpoint to redirect to the new repo URL.
    responses.add_callback(
        responses.POST, 'http://localhost:8080/repos.json',
        callback=lambda _: (301, {'Location': repo_location}, ''))
    self.tool.run(
        config=(self.yaml_config, self.config),
        verbose=True,
        clobber=False,
        repo_dir=self.working_dir,
        repo_url=self.workspace.working_dir,
        ini_config=self.ini_config,
        ini_section='app:main',
        update_config=True,
        repo_name=None,
        repo_host='http://localhost:8080')
    # The ini should record the remote repo location.
    cp = ConfigParser()
    cp.read(self.ini_config)
    self.assertEqual(
        cp.get('app:main', 'unicore.content_repo_urls').strip(),
        repo_location)
    # The yaml config still maps the repo name to the local checkout.
    with open(self.yaml_config, 'r') as fp:
        data = yaml.safe_load(fp)
    self.assertEqual(data['repositories'], {
        repo_name: self.workspace.working_dir
    })
def initConfig(self):
    """Load (or create) this plugin's config file, ensure all default
    options exist, flush it back to disk, and store the parser and path
    on the instance (self.config / self.config_path)."""
    kate.debug("initConfig()")
    config_path = kate.pate.pluginDirectories[1] + "/%s/%s.conf" % (__name__, __name__)
    config_file = QFileInfo(config_path)
    if not config_file.exists():
        # First run: create an empty config file.
        open(config_path, "w").close()
    config = ConfigParser()
    config.read(config_path)
    # init the DEFAULT options if they don't exist.
    # The DEFAULT section is special: ConfigParser always provides it,
    # has_section('DEFAULT') is always False, and add_section('DEFAULT')
    # raises ValueError (on both Python 2.7 and 3) -- so the original
    # has_section/add_section guard crashed on every run. Set the options
    # directly instead.
    defaults = (
        ("ignore", ""),
        ("filter", "*"),
        ("finder_size", "400x450"),
        ("config_size", "300x350"),
        ("search_type", "word"),
    )
    for option, value in defaults:
        if not config.has_option("DEFAULT", option):
            config.set("DEFAULT", option, value)
    # create the general section if it doesn't exist
    if not config.has_section("general"):
        config.add_section("general")
    # flush the config file (and close the handle promptly)
    with open(config_path, "w") as fp:
        config.write(fp)
    # save the config object and config path as instance vars for use later
    self.config = config
    self.config_path = config_path
def _set_config_all(self, path_to_file):
    """Merge every option from the ini file at *path_to_file* into trac.ini,
    backing trac.ini up first (or asking for confirmation when the
    environment directory is not writable)."""
    out = sys.stdout
    if not os.access(path_to_file, os.R_OK):
        self.log.warning( "cannot access file %s" % path_to_file )
        return
    elif not self.env.config:
        self.log.warning( "cannot access config file trac.ini" )
        return
    cfg = ConfigParser()
    cfg.read(path_to_file)
    if os.access(self.env.path, os.W_OK):
        # Keep a .bak copy of trac.ini before touching it.
        path_to_trac_ini = os.path.join(self.env.path, 'conf', 'trac.ini')
        shutil.copy(path_to_trac_ini, path_to_trac_ini + '.bak')
        out.write( "created a backup of trac.ini to %s.bak" % path_to_trac_ini )
        out.write('\n')
    else:
        # Cannot back up: let the operator abort interactively.
        out.write( "could not create backup of trac.ini - continue anyway? [y|n] " )
        # NOTE(review): 'input' shadows the builtin of the same name here.
        input = sys.stdin.readline()
        if not input or not input.strip() == 'y':
            return
    # Copy every option over, echoing each addition.
    for sect in cfg.sections():
        for opt in cfg.options(sect):
            self.config.set(sect, opt, cfg.get(sect, opt))
            out.write( "added config [%s] %s = %s" % (sect, opt, cfg.get(sect, opt)) )
            out.write('\n')
    self.config.save()
def _load_configuration(environment, path): """Loads a given configuration file specified by path and environment header (ini file). returns a key value representing the configuration. Values enclosed in {} are automatically decrypted using the $FURTHER_PASSWORD variable. Values that equal [RND] will be replaced with a random string.""" # Read configuration file parser = ConfigParser() parser.read(path) config = {} for option in parser.options(environment): value = parser.get(environment, option) # Handle encrypted configuration if (re.match(r'^\{.*\}$', value)): encrypted_value = re.match(r'^\{(.*)\}$', value).group(1) value = (local('decrypt.sh input="' + encrypted_value + '" password=$FURTHER_PASSWORD algorithm="PBEWithSHA1AndDESede" verbose="false"', capture=True)) # Handle random values if (re.match(r'\[RND\]', value)): value = _random_string() config[option] = value; return config
def start(self):
    """Resolve the tree/l10nbuilds properties, read this tree's section from
    the l10nbuilds ini file, build a Tree helper and kick off loading of
    the tree's l10n.ini files."""
    from scheduler import Tree
    loog = self.addLog("stdio")
    self.pending = 0
    properties = self.build.getProperties()
    self.rendered_tree = tree = properties.render(self.treename)
    l10nbuilds = properties.render(self.l10nbuilds)
    cp = ConfigParser()
    cp.read(l10nbuilds)
    repo = cp.get(tree, "repo")
    branch = cp.get(tree, "mozilla")
    path = cp.get(tree, "l10n.ini")
    l10nbranch = cp.get(tree, "l10n")
    locales = cp.get(tree, "locales")
    # "all" means: discover the locale list from the repo's all-locales
    # file; otherwise the explicit list is pushed into the build properties.
    if locales == "all":
        alllocales = "yes"
    else:
        alllocales = "no"
        properties.update({"locales": filter(None, locales.split())}, "Build")
    self.tree = Tree(self.rendered_tree, repo, branch, l10nbranch, path)
    loog.addStdout("Loading l10n.inis for %s\n" % self.rendered_tree)
    logger.debug(
        "scheduler.l10n.tree",
        "Loading l10n.inis for %s, alllocales: %s" % (self.rendered_tree, alllocales)
    )
    self.loadIni(repo, branch, path, alllocales)
def read_config(self, config_path):
    """Load the INI file at *config_path* and expose every option it
    defines as an attribute on this object (values stay strings)."""
    parser = ConfigParser()
    parser.read(config_path)
    for section in parser.sections():
        for name in parser.options(section):
            setattr(self, name, parser.get(section, name))
"Python 'coverage' module is required" exit(-1) # Create directory suffix = "_" + str(int(time.time())) cov_dir = tempfile.mkdtemp(suffix, "m2_coverage_") # Create configuration file coveragerc = os.path.join(cov_dir, ".coveragerc") coverage = os.path.join(cov_dir, ".coverage") from ConfigParser import ConfigParser from os.path import expanduser config = ConfigParser() config.read(['/etc/coveragerc', expanduser('~/.coveragerc')]) if not config.has_section('run'): config.add_section('run') config.set('run', 'data_file', coverage) config.write(open(coveragerc, 'w')) # Add arguments to tests command line testset.add_additionnal_args( ["-m", "coverage", "run", "--rcfile", coveragerc, "-a"]) # Inform the user d = { "blue": cosmetics.colors['blue'], "end": cosmetics.colors['end'], "cov_dir": cov_dir }
else: import __builtin__ as builtins builtins._ASTROPY_SETUP_ = True from astropy_helpers.setup_helpers import (register_commands, adjust_compiler, get_debug_option, get_package_info) from astropy_helpers.git_helpers import get_git_devstr from astropy_helpers.version_helpers import generate_version_py # Get some values from the setup.cfg try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser conf = ConfigParser() conf.read(['setup.cfg']) metadata = dict(conf.items('metadata')) PACKAGENAME = metadata.get('package_name', 'packagename').replace('-', '_') DESCRIPTION = metadata.get('description', 'Astropy affiliated package') AUTHOR = metadata.get('author', '') AUTHOR_EMAIL = metadata.get('author_email', '') LICENSE = metadata.get('license', 'unknown') URL = metadata.get('url', 'http://astropy.org') # Get the long description from the package's docstring __import__(PACKAGENAME) package = sys.modules[PACKAGENAME] LONG_DESCRIPTION = package.__doc__ # Store the package name in a built-in variable so it's easy
class TreeviewLanguages(gtk.TreeView):
    """Two-column language-pair picker (PyGTK) backed by a ConfigParser
    file; each row maps a source language ('*' = auto-detect) to a target
    language."""

    def __init__(self, config_path):
        gtk.TreeView.__init__(self)
        self.config = ConfigParser()
        self.config_path = config_path
        # Combo models: column 0 = display name, column 1 = language code.
        self.model_from_lang = gtk.ListStore(str, str)
        self.model_to_lang = gtk.ListStore(str, str)
        # Sort the LANGUAGUES by the value
        languages = sorted(LANGUAGES.items(), key=itemgetter(1))
        # The * means that google will try to guess the source language
        self.model_from_lang.append(['*', ''])
        for code, name in languages:
            self.model_from_lang.append([name, code])
        for code, name in languages:
            self.model_to_lang.append([name, code])
        # Combo renderers for both columns; no free-text entry allowed.
        render_from_lang = gtk.CellRendererCombo()
        render_from_lang.set_property("text-column", 0)
        render_from_lang.set_property("editable", True)
        render_from_lang.set_property("has-entry", False)
        render_from_lang.set_property("model", self.model_from_lang)
        render_from_lang.connect("changed", self.set_from_language)
        render_to_lang = gtk.CellRendererCombo()
        render_to_lang.set_property("text-column", 0)
        render_to_lang.set_property("editable", True)
        render_to_lang.set_property("has-entry", False)
        render_to_lang.set_property("model", self.model_to_lang)
        render_to_lang.connect("changed", self.set_to_language)
        column_from_lang = gtk.TreeViewColumn(_("From language"), render_from_lang, text=0)
        column_from_lang.set_expand(True)
        # NOTE(review): text=1 here vs text=0 above -- confirm which model
        # column the "To language" cell is meant to display.
        column_to_lang = gtk.TreeViewColumn(_("To language"), render_to_lang, text=1)
        column_to_lang.set_expand(True)
        self.append_column(column_from_lang)
        self.append_column(column_to_lang)
        # Row model: from-name, to-name, from-code, to-code.
        self.treeview_model = gtk.ListStore(str, str, str, str)
        self.set_model(self.treeview_model)
        self._load_config()

    def _load_config(self):
        # Fill the treeview with old configs (if its exists)
        self.config.read(self.config_path)
        if self.config.has_option("GoogleTranslate", "languages"):
            languages = self.config.get("GoogleTranslate", "languages")
            # SECURITY(review): eval() of stored config text -- assumes the
            # config file is trusted.
            try:
                languages = eval(languages)
            except Exception, e:
                # TODO: Message
                return
            for from_lang_code, to_lang_code in languages:
                # Empty source code means auto-detect ('*').
                if not from_lang_code:
                    from_lang = "*"
                else:
                    from_lang = LANGUAGES[from_lang_code]
                to_lang = LANGUAGES[to_lang_code]
                model = self.get_model()
                model.append(
                    [from_lang, to_lang, from_lang_code, to_lang_code])
* Update BetaNews """ import os import sys import time import Image, ImageFont, ImageDraw from ConfigParser import ConfigParser import time import pysvn ### read config cfg = ConfigParser() cfg.read('release.cfg') USER = cfg.get('auth', 'USER') WIKI_PASSWORD = cfg.get('auth', 'WIKI_PASSWORD') VERSION = cfg.get('version', 'VERSION') VER_MAJOR = cfg.get('version', 'VER_MAJOR') VER_MINOR = cfg.get('version', 'VER_MINOR') VER_REVISION = cfg.get('version', 'VER_REVISION') VER_BUILD = cfg.get('version', 'VER_BUILD') PRE_RELEASE_VERSION = 'a' in VERSION or 'b' in VERSION SVN = cfg.get('svn', 'SVN') SVNROOT = cfg.get('svn', 'SVNROOT')
def main(): """ The main function. """ parser = argparse.ArgumentParser( description='Tool to bootload a power module.') parser.add_argument('--address', dest='address', type=int, help='the address of the power module to bootload') parser.add_argument('--all', dest='all', action='store_true', help='bootload all power modules') parser.add_argument('--file', dest='file', help='the filename of the hex file to bootload') parser.add_argument('--8', dest='old', action='store_true', help='bootload for the 8-port power modules') parser.add_argument('--version', dest='version', action='store_true', help='display the version of the power module(s)') parser.add_argument('--verbose', dest='verbose', action='store_true', help='show the serial output') args = parser.parse_args() config = ConfigParser() config.read(constants.get_config_file()) port = config.get('OpenMotics', 'power_serial') power_serial = RS485(Serial(port, 115200)) power_communicator = PowerCommunicator(power_serial, None, time_keeper_period=0, verbose=args.verbose) power_communicator.start() if args.address or args.all: power_controller = PowerController(constants.get_power_database_file()) power_modules = power_controller.get_power_modules() if args.all: for module_id in power_modules: module = power_modules[module_id] addr = module['address'] if args.version: print "E%d - Version: %s" % ( addr, version(addr, power_communicator)) if args.file: if args.old and module['version'] == POWER_API_8_PORTS: bootload_8(addr, args.file, power_communicator, verbose=args.verbose) elif not args.old and module[ 'version'] == POWER_API_12_PORTS: bootload_12(addr, args.file, power_communicator, verbose=args.verbose) else: addr = args.address modules = [ module for module in power_modules.keys() if module['address'] == addr ] if len(modules) != 1: print 'ERROR: Could not determine energy module version. 
Aborting' sys.exit(1) if args.version: print "E%d - Version: %s" % (addr, version(addr, power_communicator)) if args.file: if args.old and module['version'] == POWER_API_8_PORTS: bootload_8(addr, args.file, power_communicator, verbose=args.verbose) elif not args.old and module['version'] == POWER_API_12_PORTS: bootload_12(addr, args.file, power_communicator, verbose=args.verbose) else: parser.print_help()
from flask import Flask from flask.ext.babel import Babel # Config Section LOCAL_CONFIG_PATH = '%s/../config/config.ini' % os.path.dirname( os.path.realpath(__file__)) PACKAGE_CONFIG_PATH = '/etc/pywebdriver/config.ini' config_file = PACKAGE_CONFIG_PATH if not os.path.isfile(config_file): config_file = LOCAL_CONFIG_PATH assert os.path.isfile(config_file), ( 'Could not find config file (looking at %s and then %s )' % (PACKAGE_CONFIG_PATH, LOCAL_CONFIG_PATH)) config = ConfigParser() config.read(config_file) drivers = {} # Project Import # Application app = Flask(__name__) from . import views from . import plugins # Localization app.config['BABEL_DEFAULT_LOCALE'] = config.get('localization', 'locale') babel = Babel(app) path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
except ImportError as exc: print("Error: failed to import settings module ({})".format(exc)) try: import math except ImportError as exc: print("Error: failed to import settings module ({})".format(exc)) try: from ConfigParser import ConfigParser except ImportError as exc: print("Error: failed to import settings module ({})".format(exc)) #parse config config=ConfigParser() config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'config', 'config.cfg')) def connect(app_name="local"): ''' | Open database connection to *app_name* | Return *conn* object to perform database operations for succesful connection | Return 0 for unsucessful connection ''' host=config.get(app_name,"host") user=config.get(app_name,"user") passwd=config.get(app_name,"passwd") db=config.get(app_name,"db") charset=config.get(app_name,"charset") use_unicode=config.get(app_name,"use_unicode") try: conn=MySQLdb.connect(host,user,passwd,db,charset=charset,use_unicode=use_unicode)
# Sphinx conf.py fragment: locate a bundled astropy_helpers checkout when
# building from the docs/ directory, then pull project metadata from setup.cfg.
if os.path.basename(os.getcwd()) == 'docs':
    a_h_path = os.path.abspath(os.path.join('..', 'astropy_helpers'))
    if os.path.isdir(a_h_path):
        # Insert after sys.path[0] so the local checkout wins over installed copies.
        sys.path.insert(1, a_h_path)

# Load all of the global Astropy configuration
from astropy_helpers.sphinx.conf import *

# Get configuration information from setup.cfg
# (ConfigParser import works on both Python 2 and Python 3).
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser
conf = ConfigParser()
conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
# All key/value pairs of the [metadata] section, as a plain dict.
setup_cfg = dict(conf.items('metadata'))

# -- General configuration ----------------------------------------------------
extensions = [
    'nbsphinx',
    'sphinx.ext.mathjax',
]
exclude_patterns = ['_build', '**.ipynb_checkpoints']

# By default, highlight as Python 3.
highlight_language = 'python3'

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'
# Training-script preamble: parse CLI flags and load hyperparameters from the
# [debug] or [run] section of config.ini into module-level constants.
from keras.layers import *
from keras.layers.recurrent import LSTM
from keras.optimizers import RMSprop
from keras.callbacks import ModelCheckpoint
from datagen import *

parser = ArgumentParser()
parser.add_argument('-v', type=int, default=0,
                    help='Index of the group used as verification group. Should be 0-9.')
parser.add_argument('--debug', action='store_true',
                    help='Use [debug] configuration, instead of [run]')
parser.add_argument('--alu', action='store_true',
                    help='Use Alu information to (hopefully) increase accuracy.')
args = parser.parse_args()
if args.v < 0 or args.v > 9:
    # NOTE(review): ArgumentError is not imported here -- presumably it comes
    # from datagen's star-import; argparse.ArgumentError also expects an
    # argument object as its first parameter.  Confirm this path is exercised.
    raise ArgumentError("VERIFICATION_GROUP must be between 0-9")
cf = ConfigParser()
cf.read('config.ini')
# Section selection: --debug switches from the [run] to the [debug] profile.
sec = 'debug' if args.debug else 'run'
TRAIN_MAXLEN = cf.getint(sec, 'train_maxlen')
VAL_MAXLEN = cf.getint(sec, 'val_maxlen')
BATCH_SIZE = cf.getint(sec, 'batch_size')
LEARNING_RATE = cf.getfloat(sec, 'learning_rate')
HIDDEN_NODES = cf.getint(sec, 'hidden_nodes')
# Config stores batches per epoch; convert to samples per epoch.
SAMPLE_PER_EPOCH = cf.getint(sec, 'batch_per_epoch') * BATCH_SIZE
N_EPOCH = cf.getint(sec, 'n_epoch')
N_VAL_SAMPLES = cf.getint(sec, 'n_val_samples')
SAMPLE_PER_GROUP = cf.getint(sec, 'sample_per_group')
# Zero is the config-file sentinel for "no limit".
if SAMPLE_PER_GROUP == 0:
    SAMPLE_PER_GROUP = None
POS_RATIO = cf.getfloat(sec, 'pos_ratio')
if POS_RATIO == 0:
    POS_RATIO = None
help="show Database Version", ) db_table = { "kegg": KEGG, "nr": Nr, "eggnog": eggNOG, "swissprot": SwissProt, "ko": KEGG, "uniprot": UNIPROT } if __name__ == "__main__": (options, args) = parser.parse_args() general_config = ConfigParser() redisconfigpath = os.path.split(os.path.abspath(__file__))[0] + '/' general_config.read(os.path.join(redisconfigpath + "database.ini")) ver = options.ver if ver: all_ver = general_config.items("Version") print("Database\tVersion") for dbname, dbver in all_ver: print(dbname + '\t' + dbver) sys.exit() dbname = options.dbname.lower() Database = options.Database if Database: Database = options.Database else: db_already = options.already.lower()
from utils import logger YumUpdate = namedtuple('YumUpdate', ['name', 'version', 'release', 'arch', 'repo']) MetaPackage = namedtuple('MetaPackage', [ 'name', 'version', 'release', 'complete_version', 'nvra', 'arch', 'description', 'hash', 'url', 'release_date', 'size', 'vendor', 'location' ]) Repo = namedtuple('Repo', ['id', 'name', 'url', 'enabled']) yum_cmd = '/usr/bin/yum' _etc_yum = ConfigParser() _etc_yum.read('/etc/yum.conf') _etc_main_section = 'main' """Various helper functions while running yum from a separate process. """ def _yum_vars(): """Gets the yum variables used for normal operation. Such as: - $basearch - $arch - $releasever These variables are crucial for normal yum operation. With out them,
#!/usr/bin/env python # encoding: utf-8 # # Copyright (c) 2010 Doug Hellmann. All rights reserved. # """ """ #end_pymotw_header import argparse from ConfigParser import ConfigParser import shlex parser = argparse.ArgumentParser(description='Short sample app') parser.add_argument('-a', action="store_true", default=False) parser.add_argument('-b', action="store", dest="b") parser.add_argument('-c', action="store", dest="c", type=int) config = ConfigParser() config.read('argparse_with_shlex.ini') config_value = config.get('cli', 'options') print 'Config :', config_value argument_list = shlex.split(config_value) print 'Arg List:', argument_list print 'Results :', parser.parse_args(argument_list)
def get_config(which):
    """Return the value of option *which* from the [bdy] section.

    The config.ini file is looked up in the directory containing this
    module.
    """
    ini_path = os.path.join(os.path.dirname(__file__), 'config.ini')
    parser = ConfigParser()
    parser.read(ini_path)
    return parser.get("bdy", which)
def add_overlay_from_config(self, config_file, area_def):
    """Create and return a transparent image adding all the overlays
    contained in a configuration file.

    :Parameters:
        config_file : str
            Configuration file name
        area_def : object
            Area Definition of the creating image

    Recognized config sections: [cache], [coasts], [rivers], [borders],
    [cities].  When a cache file newer than the config file exists, it is
    returned directly and nothing is redrawn.
    """
    config = ConfigParser()
    # Open first purely to distinguish "missing file" from parse errors.
    try:
        with open(config_file, 'r'):
            logger.info("Overlays config file %s found", str(config_file))
        config.read(config_file)
    except IOError:
        logger.error("Overlays config file %s does not exist!",
                     str(config_file))
        raise
    except NoSectionError:
        logger.error("Error in %s", str(config_file))
        raise
    # Cache management
    cache_file = None
    if config.has_section('cache'):
        config_file_name, config_file_extention = \
            os.path.splitext(config_file)
        # One cache image per area id.
        cache_file = (config.get('cache', 'file') + '_' +
                      area_def.area_id + '.png')
        try:
            configTime = os.path.getmtime(config_file)
            cacheTime = os.path.getmtime(cache_file)
            # Cache file will be used only if it's newer than config file
            if configTime < cacheTime:
                foreground = Image.open(cache_file)
                logger.info('Using image in cache %s', cache_file)
                return foreground
            else:
                logger.info("Cache file is not used "
                            "because config file has changed")
        except OSError:
            # getmtime failed (cache missing): fall through and redraw.
            logger.info("New overlay image will be saved in cache")
    x_size = area_def.x_size
    y_size = area_def.y_size
    # Fully transparent RGBA canvas to draw every overlay onto.
    foreground = Image.new('RGBA', (x_size, y_size), (0, 0, 0, 0))
    # Lines (coasts, rivers, borders) management
    prj = pyproj.Proj(area_def.proj4_string)
    if prj.is_latlong():
        # Lat/lon extents: project corners to get metric resolution.
        x_ll, y_ll = prj(area_def.area_extent[0], area_def.area_extent[1])
        x_ur, y_ur = prj(area_def.area_extent[2], area_def.area_extent[3])
        x_resolution = (x_ur - x_ll) / x_size
        y_resolution = (y_ur - y_ll) / y_size
    else:
        x_resolution = (
            (area_def.area_extent[2] - area_def.area_extent[0]) / x_size)
        y_resolution = (
            (area_def.area_extent[3] - area_def.area_extent[1]) / y_size)
    res = min(x_resolution, y_resolution)
    # Pick the coarsest shoreline dataset adequate for the pixel size
    # (c=crude, l=low, i=intermediate, h=high, f=full).
    if res > 25000:
        default_resolution = "c"
    elif res > 5000:
        default_resolution = "l"
    elif res > 1000:
        default_resolution = "i"
    elif res > 200:
        default_resolution = "h"
    else:
        default_resolution = "f"
    DEFAULT = {
        'level': 1,
        'outline': 'white',
        'width': 1,
        'fill': None,
        'fill_opacity': 255,
        'outline_opacity': 255,
        'x_offset': 0,
        'y_offset': 0,
        'resolution': default_resolution
    }
    SECTIONS = ['coasts', 'rivers', 'borders', 'cities']
    overlays = {}
    # Collect options of every recognized section into a nested dict.
    for section in config.sections():
        if section in SECTIONS:
            overlays[section] = {}
            for option in config.options(section):
                overlays[section][option] = config.get(section, option)
    is_agg = self._draw_module == "AGG"
    # Coasts
    for section, fun in zip(
            ['coasts', 'rivers', 'borders'],
            [self.add_coastlines, self.add_rivers, self.add_borders]):
        if section in overlays:
            params = DEFAULT.copy()
            params.update(overlays[section])
            # Config values arrive as strings; coerce the numeric ones.
            params['level'] = int(params['level'])
            params['x_offset'] = float(params['x_offset'])
            params['y_offset'] = float(params['y_offset'])
            params['width'] = float(params['width'])
            params['outline_opacity'] = int(params['outline_opacity'])
            params['fill_opacity'] = int(params['fill_opacity'])
            # Only coastlines support fill; drop for rivers/borders.
            if section != "coasts":
                params.pop('fill_opacity', None)
                params.pop('fill', None)
            # Non-AGG drawing backends do not accept these kwargs.
            if not is_agg:
                for key in ['width', 'outline_opacity', 'fill_opacity']:
                    params.pop(key, None)
            fun(foreground, area_def, **params)
            logger.info("%s added", section.capitalize())
    # Cities management
    if 'cities' in overlays:
        DEFAULT_FONT_SIZE = 12
        DEFAULT_OUTLINE = "yellow"
        citylist = [
            s.lstrip() for s in overlays['cities']['list'].split(',')
        ]
        font_file = overlays['cities']['font']
        font_size = int(overlays['cities'].get('font_size',
                                               DEFAULT_FONT_SIZE))
        outline = overlays['cities'].get('outline', DEFAULT_OUTLINE)
        # NOTE(review): int(None) raises TypeError when 'pt_size' is absent
        # from the [cities] section -- confirm the option is mandatory or
        # give it an integer default like font_size above.
        pt_size = int(overlays['cities'].get('pt_size', None))
        box_outline = overlays['cities'].get('box_outline', None)
        box_opacity = int(overlays['cities'].get('box_opacity', 255))
        self.add_cities(foreground, area_def, citylist, font_file, font_size,
                        pt_size, outline, box_outline, box_opacity)
    # Persist the freshly drawn overlay for next time, best-effort.
    if cache_file is not None:
        try:
            foreground.save(cache_file)
        except IOError as e:
            logger.error("Can't save cache: %s", str(e))
    return foreground
if "jobdependecies" in jobData: jobDeps = eval(jobData["jobdependecies"]) for jDep in jobDeps: if len(jDep) == 2: depName = jDep[0] if "localmode" in jobData and jobData["localmode"]: depConf = os.path.join( os.path.join(localSlavePath, "Jobs", depName, "PandoraJob.ini")) if not os.path.exists(depConf): sys.exit("ERROR - dependent JobIni does not exist %s" % depConf) else: depConfig = ConfigParser() depConfig.read(depConf) if depConfig.has_option( "information", "outputpath") and depConfig.get( "information", "outputpath") != "": depPath = os.path.dirname( depConfig.get("information", "outputpath")) else: sys.exit( "ERROR - dependent JobIni has no outputpath setting exist %s" % depConf) else: depPath = os.path.join( os.path.join(localSlavePath, "RenderOutput", depName)) if not os.path.exists(depPath):
def main(*args):
    """CLI entry point of the migration tool.

    Parses command-line options (optionally overridden by an .ini config
    file selected with -f/--env) and hands the resulting config dict to
    ``Migrate(**config).run()``.  Errors are printed to stderr rather than
    raised.
    """
    # allow flexibility for testing
    args = args or sys.argv[1:]
    login_name = get_login_name()
    migration_path = os.path.join(os.getcwd(), "migrations")
    # Program name without path or extension, for usage/help output.
    program = os.path.splitext(os.path.split(__file__)[1])[0]
    parser = argparse.ArgumentParser(
        prog=program,
        formatter_class=argparse.RawTextHelpFormatter,
        usage="""\
%(prog)s [options] <command>

A simple generic database migration tool using SQL scripts

commands:
  up      Upgrade from a revision to the latest
  down    Downgrade from the latest to a lower revision
  reset   Rollback and re-run to the current revision
  create  Create a migration. Specify "-r 0" to add a new revision
""")
    parser.add_argument(dest='command',
                        choices=('create', 'up', 'down', 'reset'))
    parser.add_argument("-e",
                        dest="engine",
                        default='sqlite3',
                        choices=('postgres', 'mysql', 'sqlite3'),
                        help="database engine (default: \"sqlite3\")")
    parser.add_argument(
        "-r",
        dest="rev",
        help="revision to use. specify \"0\" for the next revision if using the "
        "\"create\" command. (default: last revision)")
    parser.add_argument(
        "-m",
        dest="message",
        help=
        "message description for migrations created with the \"create\" command"
    )
    parser.add_argument("-u",
                        dest="user",
                        default=login_name,
                        help="database user name (default: \"%s\")" %
                        login_name)
    parser.add_argument("-p",
                        dest="password",
                        default='',
                        help="database password.")
    parser.add_argument("--host",
                        default="localhost",
                        help='database server host (default: "localhost")')
    parser.add_argument(
        "--port", help='server port (default: postgres=5432, mysql=3306)')
    parser.add_argument(
        "-d",
        dest="database",
        default=login_name,
        help="database name to use. specify a /path/to/file if using sqlite3. "
        "(default: login name)")
    parser.add_argument(
        "--path",
        default=migration_path,
        help="path to the migration folder either absolute or relative to the "
        "current directory. (default: \"./migrations\")")
    parser.add_argument(
        "-f",
        dest='file',
        metavar='CONFIG',
        default=".migrate",
        help="configuration file in \".ini\" format. "
        "sections represent different configuration environments.\n"
        "keys include: migration_path, user, password, host, port, "
        "database, and engine. (default: \".migrate\")")
    parser.add_argument(
        "--env",
        default='dev',
        help="configuration environment. applies only to config file option "
        "(default: \"dev\")")
    parser.add_argument("--debug",
                        action='store_true',
                        default=False,
                        help="print the commands but does not execute.")
    parser.add_argument(
        "--skip-errors",
        default=False,
        action='store_true',
        help="continue migration even when some scripts in a revision fail")
    parser.add_argument("--verbose",
                        dest="verbose",
                        action='store_true',
                        default=False,
                        help="show verbose output.")
    parser.add_argument('--version',
                        action='version',
                        version='%(prog)s ' + __version__)
    config = {}
    args = parser.parse_args(args=args)
    # Copy every parsed option into a plain dict for Migrate(**config).
    for name in ('engine', 'command', 'rev', 'password', 'user', 'path',
                 'env', 'skip_errors', 'host', 'port', 'database', 'file',
                 'message', 'verbose', 'debug'):
        config[name] = getattr(args, name)
    try:
        # NOTE(review): 'file' is always present after the loop above, so
        # this condition is always true; kept for safety.
        if 'file' in config:
            if os.path.isfile(config['file']):
                # Values from the selected config-file environment override
                # the command-line/defaults collected above.
                cfg = ConfigParser()
                cfg.read(config['file'])
                env = config.get('env', 'dev')
                for name in ('engine', 'user', 'password', 'migration_path',
                             'host', 'port', 'database', 'verbose'):
                    if cfg.has_option(env, name):
                        value = cfg.get(env, name)
                        # The ini key 'migration_path' maps to option 'path'.
                        if name == 'migration_path':
                            config['path'] = value
                        if value is not None:
                            config[name] = value
            elif config['file'] != '.migrate':
                # Only complain when a non-default config file was requested.
                raise Exception("Couldn't find configuration file: %s" %
                                config['file'])
        Migrate(**config).run()
    except MigrateException as e:
        print(str(e), file=sys.stderr)
    except Exception as e:
        print(str(e), file=sys.stderr)
        parser.print_usage(sys.stderr)
""" This module contains convenience methods for working with the application config. """ from ConfigParser import ConfigParser from os.path import join, dirname, realpath cfg = ConfigParser() cfg.read("app.cfg") supermarket_urls = dict(cfg.items('urls')) supermarket_files = {} working_dir = dirname(realpath(__file__)) for k, v in cfg.items('filenames'): supermarket_files[k] = join(working_dir, v) def supermarket_names(): return supermarket_files.keys() def supermarket_filename(supermarket): return supermarket_files[supermarket] def supermarket_filenames(): return supermarket_files.values() def supermarket_url(supermarket):
def init():
    """Parse CLI flags and the .ini parameter file, filling the module-level
    globals that describe the galaxy model (masses, particle counts, scale
    lengths, grid axes, output format).

    Exits early when the input .ini file does not exist.
    """
    global M_halo, M_disk, M_bulge, M_gas
    global N_halo, N_disk, N_bulge, N_gas
    global a_halo, a_bulge, gamma_halo, gamma_bulge, Rd, z0, z0_gas
    global halo_cut_r, halo_cut_M, bulge_cut_r, bulge_cut_M
    global disk_cut_r, disk_cut
    global N_total, M_total
    global phi_grid, rho_axis, z_axis, N_rho, Nz
    global N_CORES, force_yes, force_no, output, input_, gas, bulge, factor, Z
    global file_format
    flags = parser(description="Generates an initial conditions file for a "
                               "galaxy simulation with halo, stellar disk, "
                               "gaseous disk and bulge components.")
    flags.add_argument('-cores',
                       help='The number of cores to use during the potential '
                            'canculation. Make sure this number is a factor '
                            'of N_rho*N_z. Default is 1.',
                       default=1)
    flags.add_argument('--force-yes',
                       help='Don\'t ask if you want to use the existing '
                            'potential_data.txt file. Useful for automating '
                            'the execution of the script.',
                       action='store_true')
    flags.add_argument('--force-no',
                       help='Same as above, but with the opposite effect.',
                       action='store_true')
    flags.add_argument('--hdf5',
                       help='Output initial conditions in HDF5 format.',
                       action='store_true')
    flags.add_argument('-o', help='The name of the output file.',
                       metavar="init.dat", default="init.dat")
    flags.add_argument('-i', help='The name of the .ini file.',
                       metavar="params_galaxy.ini",
                       default="params_galaxy.ini")
    args = flags.parse_args()
    N_CORES = int(args.cores)
    force_yes = args.force_yes
    force_no = args.force_no
    output = args.o
    input_ = args.i
    if args.hdf5:
        file_format = 'hdf5'
    else:
        file_format = 'gadget2'
    if not path.isfile(input_):
        print("Input file not found:", input_)
        exit(0)
    config = ConfigParser()
    config.read(input_)
    # Halo
    M_halo = config.getfloat('halo', 'M_halo')
    a_halo = config.getfloat('halo', 'a_halo')
    N_halo = config.getint('halo', 'N_halo')
    gamma_halo = config.getfloat('halo', 'gamma_halo')
    halo_cut_r = config.getfloat('halo', 'halo_cut_r')
    # Disk
    M_disk = config.getfloat('disk', 'M_disk')
    N_disk = config.getint('disk', 'N_disk')
    Rd = config.getfloat('disk', 'Rd')
    z0 = config.getfloat('disk', 'z0')
    factor = config.getfloat('disk', 'factor')
    disk_cut_r = config.getfloat('disk', 'disk_cut_r')
    # Bulge
    bulge = config.getboolean('bulge', 'include')
    M_bulge = config.getfloat('bulge', 'M_bulge')
    a_bulge = config.getfloat('bulge', 'a_bulge')
    N_bulge = config.getint('bulge', 'N_bulge')
    gamma_bulge = config.getfloat('bulge', 'gamma_bulge')
    bulge_cut_r = config.getfloat('bulge', 'bulge_cut_r')
    # Gas
    gas = config.getboolean('gas', 'include')
    M_gas = config.getfloat('gas', 'M_gas')
    N_gas = config.getint('gas', 'N_gas')
    z0_gas = config.getfloat('gas', 'z0_gas')
    Z = config.getfloat('gas', 'Z')
    # z0_gas is stored in the .ini as a fraction of the disk scale height.
    z0_gas *= z0
    # Excluded components contribute no particles.
    if not gas:
        N_gas = 0
    if not bulge:
        N_bulge = 0
    M_total = M_disk + M_bulge + M_halo + M_gas
    N_total = N_disk + N_bulge + N_halo + N_gas
    N_rho = config.getint('global', 'N_rho')
    Nz = config.getint('global', 'Nz')
    # Potential grid and its logarithmic (rho, z) axes; extents are
    # expressed in the .ini as multiples of the halo scale length.
    phi_grid = np.zeros((N_rho, Nz))
    rho_max = config.getfloat('global', 'rho_max') * a_halo
    z_max = config.getfloat('global', 'z_max') * a_halo
    rho_axis = np.logspace(-2, log10(rho_max), N_rho)
    z_axis = np.logspace(-2, log10(z_max), Nz)
total += len(keys) sec = get_var(conf, sec, "template") if sec: dbf = get_fields(conf, sec) db_fields = dbf if len(dbf) else db_fields for key in keys: parse(conf, sec, key, parent) def parse(conf, sec, key="index", parent=""): global db_fields if parent == "": parent = sec url = get_var(conf, sec, "url").replace("$1", key) text = get_page(parent, url, sec + '_' + key) alias = get_var(conf, sec, "alias") if alias: process(conf, alias, text, parent, url) dbf = get_fields(conf, alias) db_fields = dbf if len(dbf) else db_fields get_values(conf, sec, text, url) process(conf, sec, text, parent, url, key) if __name__ == '__main__': conf = ConfigParser() conf.read("megaparser.ini") section = get_var(conf, "settings", "default") parse(conf, section) dump_db()
for opt, arg in opts: if opt in ("-h", "--help"): print usage sys.exit() elif opt in ("-d", "--debug"): debug = 1 elif opt in ("-v", "--verbose"): info = 1 elif opt in ("-c", "--config"): configfile = arg elif opt in ("-n", "--nodaemon"): disconnect = 0 # Read in config file config = ConfigParser() config.read(['config/vihuela.conf', default_config, configfile]) # Set up logging. FORMAT = "%(asctime)s [ %(levelname)s ] %(message)s" logging.basicConfig(format=FORMAT) log = logging.getLogger() loglev = config.get('daemon', 'loglevel').lower() if loglev == 'debug': log.setLevel(logging.DEBUG) elif loglev == 'info': log.setLevel(logging.INFO) elif loglev == 'warn': log.setLevel(logging.WARN) # Override with command line switches
#!/usr/bin/env python import sys from ConfigParser import ConfigParser, NoSectionError, NoOptionError if len(sys.argv) < 2 or len(sys.argv) > 4: print "Usage: config_helper <file> | config_helper <file> name value" sys.exit(1) config_file = sys.argv[1] cfgparser = ConfigParser() cfgparser.optionxform = str cfgparser.read(config_file) if len(sys.argv) == 2: try: for name, value in cfgparser.items('General'): print "%s='%s'" % (name, value) except NoSectionError: pass elif len(sys.argv) == 3: try: print cfgparser.get('General', sys.argv[2]) except NoSectionError: pass except NoOptionError: pass else: if not cfgparser.has_section('General'): cfgparser.add_section('General')
# check for a running update process, this may take a while so it's better to check... try: lck = open('/tmp/suricata-rule-updater.py', 'w+') fcntl.flock(lck, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError: # already running, exit status 99 sys.exit(99) if __name__ == '__main__': # load list of configured rules from generated config enabled_rulefiles = dict() rule_properties = dict() updater_conf = '/usr/local/etc/suricata/rule-updater.config' if os.path.exists(updater_conf): cnf = ConfigParser() cnf.read(updater_conf) for section in cnf.sections(): if section == '__properties__': # special section, rule properties (extend url's etc.) for item in cnf.items(section): rule_properties[item[0]] = item[1] elif cnf.has_option(section, 'enabled') and cnf.getint( section, 'enabled') == 1: enabled_rulefiles[section.strip()] = {} # input filter if cnf.has_option(section, 'filter'): enabled_rulefiles[section.strip()]['filter'] = cnf.get( section, 'filter').strip() else: enabled_rulefiles[section.strip()]['filter'] = ""
def config_load(self):
    """Load ~/.games_nebula/config/config.ini, apply the visual settings to
    GTK, fill in any missing goglib/emulation defaults, create the
    winetricks cache directories, and write the (possibly extended) config
    back to disk.
    """
    global_config_file = os.getenv(
        'HOME') + '/.games_nebula/config/config.ini'
    global_config_parser = ConfigParser()
    global_config_parser.read(global_config_file)
    # Apply the configured GTK look to the default screen.
    gtk_theme = global_config_parser.get('visuals', 'gtk_theme')
    gtk_dark = global_config_parser.getboolean('visuals', 'gtk_dark')
    icon_theme = global_config_parser.get('visuals', 'icon_theme')
    font = global_config_parser.get('visuals', 'font')
    screen = Gdk.Screen.get_default()
    gsettings = Gtk.Settings.get_for_screen(screen)
    gsettings.set_property('gtk-theme-name', gtk_theme)
    gsettings.set_property('gtk-application-prefer-dark-theme', gtk_dark)
    gsettings.set_property('gtk-icon-theme-name', icon_theme)
    gsettings.set_property('gtk-font-name', font)
    # goglib preferences: default the download dir if not yet configured.
    if not global_config_parser.has_section('goglib preferences'):
        global_config_parser.add_section('goglib preferences')
    if not global_config_parser.has_option('goglib preferences',
                                           'goglib_download_dir'):
        goglib_download_dir = os.getenv(
            'HOME') + '/.games_nebula/games/goglib/downloads'
        global_config_parser.set('goglib preferences', 'goglib_download_dir',
                                 str(goglib_download_dir))
    else:
        goglib_download_dir = global_config_parser.get(
            'goglib preferences', 'goglib_download_dir')
    # emulation settings: winetricks cache locations, with defaults.
    if not global_config_parser.has_section('emulation settings'):
        global_config_parser.add_section('emulation settings')
    if not global_config_parser.has_option('emulation settings',
                                           'winetricks_cache'):
        self.winetricks_cache = os.getenv('HOME') + '/.cache/winetricks'
        global_config_parser.set('emulation settings', 'winetricks_cache',
                                 str(self.winetricks_cache))
    else:
        self.winetricks_cache = global_config_parser.get(
            'emulation settings', 'winetricks_cache')
    if not global_config_parser.has_option('emulation settings',
                                           'winetricks_cache_backup'):
        self.winetricks_cache_backup = \
            goglib_download_dir + '/_winetricks_cache_backup'
        global_config_parser.set('emulation settings',
                                 'winetricks_cache_backup',
                                 str(self.winetricks_cache_backup))
    else:
        self.winetricks_cache_backup = global_config_parser.get(
            'emulation settings', 'winetricks_cache_backup')
    # Make sure both cache directories exist.
    if not os.path.exists(self.winetricks_cache):
        os.makedirs(self.winetricks_cache)
    if not os.path.exists(self.winetricks_cache_backup):
        os.makedirs(self.winetricks_cache_backup)
    # FIX: write the config back through a context manager so the file
    # handle is closed even if write() raises (previously a bare
    # open()/close() pair leaked the handle on error).  Reuses the path
    # computed above instead of rebuilding the same string.
    with open(global_config_file, 'w') as config_file:
        global_config_parser.write(config_file)
class SaltAPI(object):
    """Thin HTTP client for a salt-api endpoint.

    Reads the endpoint URL and credentials from the [salt] section of
    ``cmdb.conf`` (one directory above this file).  All requests use
    ``verify=False``.
    """

    def __init__(self):
        # Shared session so the TCP connection is reused across calls.
        self.s = requests.session()
        self.c = ConfigParser(allow_no_value=True)
        # cmdb.conf lives one directory above this module.
        self.c.read(
            os.path.join(
                os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                'cmdb.conf'))
        self.url = self.c.get('salt', 'url')

    def getToken(self):
        """Log in to salt-api and return a fresh auth token."""
        login_url = '{0}/login'.format(self.url)
        dic = {
            'username': self.c.get('salt', 'user'),
            'password': self.c.get('salt', 'password'),
            'eauth': self.c.get('salt', 'eauth')
        }
        # NOTE(review): verify=False disables TLS certificate checking --
        # acceptable only for trusted internal endpoints.
        result = self.s.post(login_url, json=dic, verify=False).text
        return json.loads(result)['return'][0]['token']

    def header(self):
        """Build request headers with a freshly obtained auth token."""
        headers = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Accept': 'application/x-yaml',
            'X-Auth-Token': self.getToken()
        }
        return headers

    def cmd(self, ip, command):
        """Run a shell command on the target(s) via cmd.run; returns the
        raw (YAML) response text."""
        dic = {
            'client': 'local',
            'tgt': ip,
            'fun': 'cmd.run',
            'arg': [command]
        }
        # dic = {
        #     'client': 'runner',
        #     'fun': 'getresult.get',
        #     'arg': [ip, 'cmd.run', command]
        # }
        response = self.s.post(self.url, headers=self.header(), data=dic,
                               verify=False).text
        return response

    def minion(self, ip, release):
        """Invoke runner function *release* with *ip* as its argument."""
        dic = {
            'client': 'runner',
            'fun': release,
            'arg': ip,
        }
        response = self.s.post(self.url, headers=self.header(), data=dic,
                               verify=False).text
        return response

    def pushFile(self, ip, source, dest):
        """Copy salt://*source* to *dest* on the target via the
        getresult.get runner; returns the raw response text."""
        dic = {
            'client': 'runner',
            'fun': 'getresult.get',
            'arg': [ip, 'cp.get_file',
                    "{0}, {1}".format('salt://' + source, dest)]
        }
        # dic = {
        #     'client': 'local',
        #     'tgt': ip,
        #     'fun': 'cp.get_file',
        #     'arg': ['salt://' + source, dest]
        # }
        response = self.s.post(self.url, headers=self.header(), data=dic,
                               verify=False).text
        return response

    def getReuslt(self, jid):
        # NOTE(review): method name is a typo for "getResult" but is kept
        # as-is because external callers may depend on it.
        """Fetch the result of job *jid* from /jobs/<jid>."""
        headers = {
            'Accept': 'application/x-yaml',
            'X-Auth-Token': self.getToken()
        }
        url = self.url + '/jobs/' + jid
        response = self.s.get(url, headers=headers, verify=False).text
        return response

    def exeScript(self, ip, filename):
        """Execute salt://*filename* on the target via cmd.script through
        the getresult.get runner; returns the raw response text."""
        dic = {
            'client': 'runner',
            'fun': 'getresult.get',
            'arg': [ip, 'cmd.script', '{0}'.format('salt://' + filename)]
        }
        response = self.s.post(self.url, headers=self.header(), data=dic,
                               verify=False).text
        return response
def get_config(self, name):
    """Return the value of *name* from the [login] section of the config
    file located at ``self.cfg_path``."""
    parser = ConfigParser()
    parser.read(self.cfg_path)
    value = parser.get('login', name)
    return value
# GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # from itertools import izip import numpy import pychips as chips from sherpa.utils import get_keyword_defaults from sherpa import get_config from ConfigParser import ConfigParser, NoSectionError config = ConfigParser() config.read(get_config()) __all__ = ('clear_window', 'plot', 'histo', 'contour', 'point', 'set_subplot', 'get_split_plot_defaults', 'get_confid_point_defaults', 'get_plot_defaults', 'get_point_defaults', 'begin', 'end', 'get_data_plot_defaults', 'get_model_plot_defaults', 'get_fit_plot_defaults', 'get_resid_plot_defaults', 'get_ratio_plot_defaults', 'get_contour_defaults', 'exceptions', 'get_data_contour_defaults', 'get_model_contour_defaults', 'get_fit_contour_defaults', 'get_resid_contour_defaults', 'get_ratio_contour_defaults', 'get_confid_plot_defaults', 'get_confid_contour_defaults', 'set_window_redraw', 'set_jointplot', 'get_histo_defaults', 'get_model_histo_defaults', 'get_component_plot_defaults', 'get_component_histo_defaults', 'vline', 'hline', 'get_cdf_plot_defaults', 'get_scatter_plot_defaults', 'get_latex_for_string')
# NOTE(review): daka() is a method of the Readcolor class in the original
# file (it is invoked as rc.daka() below); the class header lies outside
# this chunk.
def daka(self):
    """Perform the daily check-in ("daka") on readcolor.com via _daka()."""
    signHeaders = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        #"Accept-Encoding":"gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.8",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.66 Safari/537.36 LBBROWSER",
        "Host": "readcolor.com",
    }
    SIGN_URL = "http://readcolor.com/users/sign"

    def respParserFunc(resp):
        # Parse the JSON reply; "succeed" marks a successful check-in.
        # (The u"..." message below is user-facing output: "check-in OK".)
        resp = json.loads(resp.read())
        return (1, u"打卡成功!") if resp["succeed"] else (0, "{}".format(resp))

    self._daka(SIGN_URL, headers=signHeaders, respParserFunc=respParserFunc)


if __name__ == '__main__':
    #read configuration file
    configFile = os.path.join(sys.path[0], "user_config.ini")
    cf = ConfigParser()
    cf.read(configFile)
    username = cf.get("readcolor", "username")
    pwd = cf.get("readcolor", "password")
    # Log in and perform the daily check-in.
    rc = Readcolor()
    rc.login(username, pwd)
    rc.daka()
class _AuthConf(object):
    """ Parser of repoze.what-quickstart configuration files. """

    # Ordered (adder-method, section, option) triples driving find_options().
    _OPTION_SPECS = (
        ("_add_object", "general", "dbsession"),
        ("_add_string", "general", "charset"),
        ("_add_object", "authentication", "user_class"),
        ("_add_object", "authentication", "form_plugin"),
        ("_add_boolean", "authentication", "form_identifies"),
        ("_add_string", "authentication", "cookie_name"),
        ("_add_string", "authentication", "cookie_secret"),
        ("_add_integer", "authentication", "cookie_timeout"),
        ("_add_integer", "authentication", "cookie_reissue_time"),
        ("_add_string", "authentication", "login_url"),
        ("_add_string", "authentication", "login_handler"),
        ("_add_string", "authentication", "post_login_url"),
        ("_add_string", "authentication", "logout_handler"),
        ("_add_string", "authentication", "post_logout_url"),
        ("_add_string", "authentication", "login_counter_name"),
        ("_add_string", "authentication", "log_level"),
        ("_add_string", "authentication", "log_file"),
        ("_add_boolean", "authentication", "skip_authentication"),
        ("_add_object", "authorization", "group_class"),
        ("_add_object", "authorization", "permission_class"),
    )

    def __init__(self, path_to_conf):
        """
        :param path_to_conf: The path to the repoze.what-quickstart
            configuration file.
        """
        conf_dir = path.dirname(path_to_conf)
        # Expose the config file's directory as the interpolation var "here".
        self.parser = ConfigParser(defaults={'here': conf_dir})
        self.parser.read(path_to_conf)
        self.options = {'translations': {}}

    def find_options(self):
        """
        Parse the configuration file and extract the auth options.

        :raises BadOptionError: If an option has an invalid value.

        """
        # General, authentication and authorization options, in declaration
        # order:
        for adder, section, option in self._OPTION_SPECS:
            getattr(self, adder)(section, option)
        # Translations:
        for translation in _SUPPORTED_TRANSLATIONS:
            if not self.parser.has_option("translations", translation):
                continue
            self.options['translations'][translation] = \
                self.parser.get("translations", translation)

    def _add_string(self, section, option):
        """ Add the ``option`` if it's defined. """
        if not self.parser.has_option(section, option):
            return
        self.options[option] = self.parser.get(section, option)

    def _add_boolean(self, section, option):
        """ Add the ``option`` as a boolean if it's defined. """
        if not self.parser.has_option(section, option):
            return
        try:
            self.options[option] = self.parser.getboolean(section, option)
        except ValueError:
            raw = self.parser.get(section, option)
            raise BadOptionError('Option %s ("%s") is not a boolean' %
                                 (option, raw))

    def _add_integer(self, section, option):
        """ Add the ``option`` as a integer if it's defined. """
        if not self.parser.has_option(section, option):
            return
        try:
            self.options[option] = self.parser.getint(section, option)
        except ValueError:
            raw = self.parser.get(section, option)
            raise BadOptionError('Option %s ("%s") is not an integer' %
                                 (option, raw))

    def _add_object(self, section, option):
        """
        Resolve object ``option`` if it's defined and load it to the options.

        :raises BadOptionError: If the value of ``option`` cannot be resolved.

        """
        if not self.parser.has_option(section, option):
            return
        value = self.parser.get(section, option)
        try:
            resolved = EntryPoint.parse("x=%s" % value).load(False)
        except (ValueError, ImportError):
            raise BadOptionError('Option %s ("%s") cannot be resolved' %
                                 (option, value))
        self.options[option] = resolved
class New_Common(object): """docstring for New_Common""" def __init__(self): super(New_Common, self).__init__() self.pwd = os.path.dirname(sys.argv[0]) self.configfile = self.pwd + '\\config.ini' if os.path.isfile(self.configfile) == False: print u"%s\\config.ini 文件不存在" % (self.pwd) self.pause() sys.exit(10) def getvar(self): self.config = ConfigParser() self.config.read(self.configfile) self.spid = self.config.get('common', 'spid') self.spguid = self.config.get('common', 'spguid') self.port = self.config.get('common', 'port') self.loggerip = self.config.get('common', 'loggerip') self.source_spid = self.config.get('common', 'source_spid') self.session_ip = self.config.get('common', 'session_ip') #self.config.close() def check(self): print u"自动配置NameServer,LoggerServer,SessionServer" self.pause() self.getvar() os.chdir('D:\\') self.NameServerdir = 'NameServer_' + self.source_spid self.LoggerServerdir = 'LoggerServer_' + self.source_spid self.SessionServerdir = 'SessionServer_' + self.source_spid for x in self.NameServerdir, self.LoggerServerdir, self.SessionServerdir: if os.path.isdir(x) == False: print u'%s目录不存在' % (x) self.pause() sys.exit(10) self.logerport = int('340' + self.port) self.nameport = int('350' + self.port) self.sessionport = int('310' + self.port) for x in self.logerport, self.nameport, self.sessionport: self.conn(x) def conn(self, port='1', host="127.0.0.1"): self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: self.s.connect((host, port)) print u"%s端口已经使用了" % (port) self.s.close() self.pause() sys.exit(10) except: self.s.close() def copy(self): os.chdir('D:\\') self.NEW_NameServerdir = 'NameServer_' + self.spid self.NEW_LoggerServerdir = 'LoggerServer_' + self.spid self.NEW_SessionServerdir = 'SessionServer_' + self.spid nowtime = time.strftime("%Y-%m-%d_%H-%M-%S") for x in self.NEW_NameServerdir, self.NEW_LoggerServerdir, self.NEW_SessionServerdir: if os.path.isdir(x): shutil.move(x, 'backup\\' + nowtime + '\\') print u"移动%s到%s" % (x, 
'backup\\' + nowtime + '\\') print u"复制%s" % (self.NEW_NameServerdir) shutil.copytree(self.NameServerdir, self.NEW_NameServerdir) print u"复制%s" % (self.NEW_LoggerServerdir) shutil.copytree(self.LoggerServerdir, self.NEW_LoggerServerdir) print u"复制%s" % (self.NEW_SessionServerdir) shutil.copytree(self.SessionServerdir, self.NEW_SessionServerdir) def change(self): self.copy() print u"修改%s NameServer配置文件" % (self.spid) shutil.copy(self.NEW_NameServerdir + "\\NameServer.txt", self.NEW_NameServerdir + "\\NameServer.txt.bak") shutil.copy(self.NEW_LoggerServerdir + "\\LoggerServer.txt", self.NEW_LoggerServerdir + "\\LoggerServer.txt.bak") shutil.copy(self.NEW_SessionServerdir + "\\SessionServer.txt", self.NEW_SessionServerdir + "\\SessionServer.txt.bak") result1 = [] with open(self.NEW_NameServerdir + "\NameServer.txt", 'rU') as myfile: for line in myfile.readlines(): if line.lstrip(" ").startswith("ServerName"): line = line.replace(self.source_spid, self.spid) if line.lstrip(" ").startswith("spguid"): line = """ spguid = %s,""" % (self.spguid) if line.lstrip(" ").startswith("Port"): oldport = line.strip("\r\n").strip(",\" ").replace( "\",", "").split("=")[-1].strip(" ") if int(oldport) >= 21000: newport = oldport[:3] + self.port line = " Port = %s" % (newport) oldport = None #print newport if line.lstrip(" ").startswith("Host"): line = """ Host = "%s",""" % (self.session_ip) if line.lstrip(" ").startswith("DBName"): line = """ DBName = "cq_name%s",""" % (self.spguid) line = line.decode('GBK').encode('gbk') result1.append(line.strip("\r\n")) open(self.NEW_NameServerdir + "\NameServer.txt", 'w').write("\n".join(result1)) print u"修改%s LoggerServer配置文件" % (self.spid) result2 = [] num1 = 0 num = 0 with open(self.NEW_LoggerServerdir + "\LoggerServer.txt", 'rU') as myfile: for line in myfile.readlines(): if line.lstrip(" ").startswith("Host"): line = """ Host = "%s",""" % (self.loggerip) if line.lstrip(" ").startswith("DBName"): line = """ DBName = "cq_log%s",""" % 
(self.spguid) if line.lstrip(" ").startswith("Port"): oldport = re.sub("[^0-9]", "", line) if oldport >= 21000: newport = oldport[:3] + self.port num1 += 1 if num1 == 2: line = """ Port = "%s",""" % (newport) else: line = """ Port = %s,""" % (newport) oldport = None if line.lstrip(" ").startswith("ServiceName"): num += 1 if line.lstrip(" ").startswith("ServiceName") and num == 2: line = line.replace(self.source_spid, self.spid) line = line.decode('GBK').encode('gbk') result2.append(line.strip("\r\n")) open(self.NEW_LoggerServerdir + "\LoggerServer.txt", 'w').write("\n".join(result2)) print u"修改%s SessionServer配置文件" % (self.spid) result3 = [] num = 0 with open(self.NEW_SessionServerdir + "\SessionServer.txt", 'r') as myfile: for line in myfile.readlines(): if line.lstrip(" ").startswith("ServerName"): line = """ ServerName = "[%s]武易会话服务",""" % (self.spid) if line.lstrip(" ").startswith("spguid"): line = """ spguid = %s,""" % (self.spguid) if line.lstrip(" ").startswith("SPID"): line = """ SPID = "%s",,""" % (self.spid) if line.lstrip(" ").startswith("Port"): oldport = line.strip("\r\n").strip(",\" ").replace( "\",", "").split("=")[-1].strip(" ") if int(oldport) >= 21000: newport = oldport[:3] + self.port line = " Port = %s," % (newport) oldport = None if line.lstrip(" ").startswith("Host"): num += 1 if line.lstrip(" ").startswith("Host") and num == 3: line = """ Host = "%s",""" % (self.session_ip) if line.lstrip(" ").startswith("DBName"): line = """ DBName = "cq_account%s",""" % (self.spguid) line = line.decode('GBK').encode('gbk') result3.append(line.strip("\r\n")) open(self.NEW_SessionServerdir + "\\SessionServer.txt", 'w').write("\n".join(result3)) self.pause() def pause(self): os.system('pause')
class Config:
    """A configuration registry backed by a prototype mapping.

    The prototype maps section -> option -> spec, where each spec exposes
    ``type`` (a callable used to coerce values) and ``default``.
    """

    def __init__(self, prototype, fileName=None):
        """
        @param prototype: The configuration protype mapping
        @param fileName: The file that holds this configuration registry
        """
        self.prototype = prototype

        # read configuration
        self.config = ConfigParser()
        if fileName:
            if not os.path.isfile(fileName):
                path = Resource.getWritableResourcePath()
                fileName = os.path.join(path, fileName)
            self.config.read(fileName)
        # Always record the target (None for a memory-only registry) so that
        # set() fails with a clear error instead of AttributeError.
        self.fileName = fileName

        # fix the defaults and non-existing keys
        for section, options in self.prototype.items():
            if not self.config.has_section(section):
                self.config.add_section(section)
            for option, spec in options.items():
                if not self.config.has_option(section, option):
                    self.config.set(section, option, str(spec.default))

    def get(self, section, option):
        """
        Read a configuration key.

        @param section: Section name
        @param option: Option name
        @return: Key value, coerced with the prototype's type
        """
        try:
            opt_type = self.prototype[section][option].type
            default = self.prototype[section][option].default
        except KeyError:
            Log.warn("Config key %s.%s not defined while reading." %
                     (section, option))
            opt_type, default = str, None

        # BUGFIX: the original `has_option and get or default` idiom fell
        # back to the default whenever the stored value was falsy (e.g. an
        # empty string); branch explicitly instead.
        if self.config.has_option(section, option):
            value = self.config.get(section, option)
        else:
            value = default

        if opt_type == bool:
            # Accept the usual truthy spellings, case-insensitively.
            value = str(value).lower() in ("1", "true", "yes", "on")
        else:
            value = opt_type(value)
        return value

    def set(self, section, option, value):
        """
        Set the value of a configuration key and persist the registry.

        @param section: Section name
        @param option: Option name
        @param value: Value name
        """
        try:
            # BUGFIX: the original referenced bare `prototype`, which is
            # undefined here and raised NameError instead of triggering the
            # intended "not defined" warning.
            self.prototype[section][option]
        except KeyError:
            Log.warn("Config key %s.%s not defined while writing."
                     % (section, option))

        if not self.config.has_section(section):
            self.config.add_section(section)

        if type(value) == unicode:
            # NOTE(review): Python 2 path; `encoding` is assumed to be a
            # module-level name defined elsewhere in this file -- confirm.
            value = value.encode(encoding)
        else:
            value = str(value)
        self.config.set(section, option, value)

        with open(self.fileName, "w") as f:
            self.config.write(f)