def process_l10n_ini(inifile):
    """Read a Mozilla l10n.ini file and copy the en-US localisation
    directories it references into the l10n checkout.

    Recurses into any l10n.ini files listed in the [includes] section.
    Relies on module globals: ``verbose``, ``l10ncheckout`` and
    ``path_neutral``.
    """
    l10n = ConfigParser()
    # Close the ini file explicitly (the original leaked the handle).
    ini_fp = open(path_neutral(inifile))
    try:
        l10n.readfp(ini_fp)
    finally:
        ini_fp.close()
    l10n_ini_path = os.path.dirname(inifile)
    # 'subdir' instead of 'dir': avoid shadowing the builtin.
    for subdir in l10n.get('compare', 'dirs').split():
        frompath = os.path.join(l10n_ini_path, l10n.get('general', 'depth'),
                                subdir, 'locales', 'en-US')
        destpath = os.path.join(l10ncheckout, 'en-US', subdir)
        if verbose:
            print('%s -> %s' % (frompath, destpath))
        try:
            shutil.copytree(frompath, destpath)
        except OSError:
            # Source directory missing: report and keep going.
            print('ERROR: %s does not exist' % frompath)
    try:
        for include in l10n.options('includes'):
            include_ini = os.path.join(l10n_ini_path,
                                       l10n.get('general', 'depth'),
                                       l10n.get('includes', include))
            if os.path.isfile(include_ini):
                process_l10n_ini(include_ini)
    except TypeError:
        pass
    except NoSectionError:
        # No [includes] section: nothing to recurse into.
        pass
def version(self, extended=False):
    """Report the cobbler version read from /etc/cobbler/version.

    With extended=False (default) returns a float for backwards
    compatibility (e.g. 1.302 for version 1.3.2, as used by koan's
    comparisons); with extended=True returns a dict with the keys
    gitstamp, gitdate, builddate, version and version_tuple.
    """
    parser = ConfigParser()
    parser.read("/etc/cobbler/version")
    data = {}
    for field in ("gitdate", "gitstamp", "builddate", "version"):
        data[field] = parser.get("cobbler", field)
    # Derive version_tuple from the dotted version string rather than
    # trusting a possibly stale value stored in the file.
    data["version_tuple"] = [int(piece) for piece in data["version"].split(".")]
    if extended:
        return data
    # Legacy float encoding: major + minor/10 + patch/1000.
    parts = data["version_tuple"]
    return parts[0] + 0.1 * parts[1] + 0.001 * parts[2]
def process_config(directory, config_file, sp):
    """Validate the sections of *config_file* and rewrite the data files
    they describe.

    Invalid sections get their data file deleted; valid sections get the
    section's header prepended to their file. The config file itself is
    removed afterwards.

    Returns "error" when the header layout is neither "db" nor "element",
    None when every section was valid, otherwise
    {"invalid_sections": [...]}.
    """
    config = ConfigParser()
    config.read(config_file)
    sections = config.sections()
    # Decide which header layout applies (project helper).
    db_or_element = db_or_element_format(sp, sections)
    if db_or_element == "db":
        invalid_sections = fc.invalid_config_sections(directory, config_file, sp.full_header_data("db"))
    elif db_or_element == "element":
        invalid_sections = fc.invalid_config_sections(directory, config_file, sp.full_header_data("element"))
    else:
        return "error"
    for s in sections:
        fname = config.get(s, "file_name")
        header = config.get(s, "header")
        if s in invalid_sections:
            # Invalid section: drop its data file if present.
            # NOTE(review): plain concatenation — assumes *directory*
            # ends with a path separator; confirm against callers.
            if os.path.exists(directory + fname):
                os.remove(directory + fname)
        else:
            # Valid section: write header + original contents to a temp
            # file, then swap it into place.
            with open(directory + s + "_temp.txt", "w") as w:
                w.write(header + "\n")
                with open(directory + fname, "r") as r:
                    for line in r:
                        w.write(line)
            os.remove(directory + fname)
            os.rename(directory + s + "_temp.txt", directory + fname)
    os.remove(config_file)
    if len(invalid_sections) == 0:
        return None
    return {"invalid_sections":invalid_sections}
def main():
    """Command-line entry point for the odt converter.

    Two modes:
    * ``prog file.odt [targetdir]`` — convert a single document
      (targetdir defaults to the file name without extension);
    * ``prog config.ini`` — convert every section of a config file,
      resolving per-section ``filename``/``targetdir`` options relative
      to the config file's directory.
    """
    options, args = parser.parse_args()
    # Single .odt argument: default the target dir to the name sans ".odt".
    if len(args) == 1 and args[0].endswith('.odt'):
        args.append(args[0][:-4])
    if len(args) == 2:
        filename, targetdir = args
        convert_odt(filename, targetdir, debug=options.debug, options={
            'download_source_link': options.download_source_link
        })
    elif len(args) == 1:
        configname = os.path.abspath(args[0])
        configdir = os.path.dirname(configname)
        config = ConfigParser()
        config.read(configname)
        for section in config.sections():
            # Old-style "cond and a or b" defaulting: fall back to the
            # section name / '.' when the option is absent.
            filename = config.has_option(section, 'filename') and \
                config.get(section, 'filename') or section
            filename = os.path.normpath(
                os.path.join(configdir, filename))
            targetdir = config.has_option(section, 'targetdir') and \
                config.get(section, 'targetdir') or '.'
            targetdir = os.path.normpath(
                os.path.join(configdir, targetdir))
            print "Converting %s in %s" % (filename, targetdir)
            convert_odt(filename, targetdir, debug=options.debug,
                        options={'download_source_link': options.download_source_link})
def getMysqlConfig(self, db = 'test'):
    """Build a MySQL connection dict from conf/config.ini, section [db].

    Args:
        db: database name to place in the returned dict.

    Returns:
        dict with host, port, user, passwd and db keys.

    Raises:
        Exception: when the config file or its [db] section is missing.
    """
    try:
        parser = ConfigParser()
        parser.read('conf/config.ini')
        # Assemble and return the connection settings in one expression.
        return {
            'host': parser.get('db', 'host'),
            'port': parser.getint('db', 'port'),
            'user': parser.get('db', 'user'),
            'passwd': parser.get('db', 'passwd'),
            'db': db,
        }
    except Exception as e:
        error = """Can't load config from [conf/config.ini] or [db] node doesn't exist.\n Please make sure this file."""
        logging.warning(error)
        print(error)
        raise Exception(e)
class Config(object):
    """Wrapper around the user's ini file, exposing a [config] section
    (fetch_command, player_command, media_dir) and a [podcast] section
    as attributes via ``__getattr__``.

    A template config is written and MarrieError raised when the file
    does not exist yet.
    """

    # Options read with raw interpolation (may contain % placeholders).
    _raw_options = ('fetch_command', 'player_command')
    # Options read with normal interpolation.
    _options = ('media_dir',)
    # Options run through os.path.expanduser before being returned.
    _expanduser = ('media_dir',)

    def __init__(self, my_file):
        my_file = os.path.expanduser(my_file)
        if not os.path.exists(my_file):
            # First run: write the template (module global `config_file`)
            # and tell the user to edit it.
            with codecs.open(my_file, 'w', encoding='utf-8') as fp:
                fp.write(config_file)
            raise MarrieError(
                'Missing config file: %s. It will be created for you.' % my_file)
        self._cp = ConfigParser()
        self._cp.read(my_file)
        # All [config] options are mandatory.
        for opt in (self._raw_options + self._options):
            if not self._cp.has_option('config', opt):
                raise MarrieError('Missing needed config option: config:%s' \
                    % opt)

    def __getattr__(self, attr):
        """Resolve a known option name to its value; anything else
        raises AttributeError as usual."""
        opt = None
        if attr in self._raw_options:
            # Positional third argument is Python 2 ConfigParser's
            # `raw` flag: skip %-interpolation for command templates.
            opt = self._cp.get('config', attr, True)
        elif attr in self._options:
            opt = self._cp.get('config', attr)
        elif attr == 'podcast':
            # Whole [podcast] section as an ordered name -> url mapping.
            opt = OrderedDict(self._cp.items('podcast'))
        if opt is None:
            raise AttributeError(attr)
        if attr in self._expanduser and not isinstance(opt, dict):
            return os.path.expanduser(opt)
        return opt
def _load_object_post_as_copy_conf(self, conf):
    """Backfill conf['object_post_as_copy'] from the proxy-server app's
    section of the paste config when the middleware conf did not set it.
    """
    if ('object_post_as_copy' in conf or '__file__' not in conf):
        # Option is explicitly set in middleware conf. In that case,
        # we assume operator knows what he's doing.
        # This takes preference over the one set in proxy app
        return
    cp = ConfigParser()
    if os.path.isdir(conf['__file__']):
        # Conf dir layout: merge every file in the directory.
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])
    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return
    # The last entry of the pipeline is the proxy app; its config lives
    # in section "app:<name>".
    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    try:
        conf['object_post_as_copy'] = cp.get(proxy_section,
                                             'object_post_as_copy')
    except (NoSectionError, NoOptionError):
        # Proxy app doesn't set it either: leave conf untouched.
        pass
def config():
    """Read settings.ini and populate the module-level download options.

    Returns [lang, lang2, forcesub, forceusa, localizecookies, quality,
    onlymainsub].
    """
    global video_format
    global resolution
    configr = ConfigParser()
    configr.read('settings.ini')
    quality = configr.get('SETTINGS', 'video_quality')
    # Quality label -> [video_format code, resolution code].
    qualities = {'android': ['107', '71'],
                 '360p': ['106', '60'],
                 '480p': ['106', '61'],
                 '720p': ['106', '62'],
                 '1080p': ['108', '80'],
                 'highest': ['0', '0']}
    video_format = qualities[quality][0]
    resolution = qualities[quality][1]
    global lang
    global lang2
    lang = configr.get('SETTINGS', 'language')
    lang2 = configr.get('SETTINGS', 'language2')
    # Map ASCII config names to the site's native language labels.
    langd = {'Espanol_Espana': u'Español (Espana)',
             'Francais': u'Français (France)',
             'Portugues': u'Português (Brasil)',
             'English': u'English',
             'Espanol': u'Español',
             'Turkce': u'Türkçe',
             'Italiano': u'Italiano',
             'Arabic': u'العربية',
             'Deutsch': u'Deutsch'}
    lang = langd[lang]
    lang2 = langd[lang2]
    forcesub = configr.getboolean('SETTINGS', 'forcesubtitle')
    global forceusa
    forceusa = configr.getboolean('SETTINGS', 'forceusa')
    global localizecookies
    localizecookies = configr.getboolean('SETTINGS', 'localizecookies')
    onlymainsub = configr.getboolean('SETTINGS', 'onlymainsub')
    return [lang, lang2, forcesub, forceusa, localizecookies, quality,
            onlymainsub]
def get_value(self, section, option): """ Retourne la valeur de l'option contenue dans la section passée en paramètre. """ # On travaille sur le nom de la section parente # au cas où s'il s'agit d'une sous-section. parent_section = self.get_parent_section_name(section) # On vérifie d'abord que la section existe. if self.__spec_sections.has_key(parent_section): # Puis on récupère la spécification de la section. section_spec = self.__spec_sections.get(parent_section) option_type = None # On parcours les options de la spécification à la recherche # du type de la valeur de l'option que l'on souhaite obtenir. for option_spec in section_spec[2]: if option_spec[0] == option: option_type = option_spec[1] # Introuvable dans les options de la section ? # On regarde dans ceux de la sous-section si elle existe. if self.__spec_has_subsection(parent_section): for sub_option_spec in section_spec[3]: if sub_option_spec[0] == option: option_type = sub_option_spec[1] # On appelle la fonction qui va bien en fonction du type à obtenir. # # Les sous-sections héritent des options de leur section parente. # Si l'option n'existe pas dans la section, il doit sûrement s'agir # d'une sous-section. On cherche alors l'option dans la section # parente. if option_type == 'string': try: return ConfigParser.get(self, section, option) except NoOptionError: return ConfigParser.get(self, parent_section, option) if option_type == 'int': try: return ConfigParser.getint(self, section, option) except NoOptionError: return ConfigParser.getint(self, parent_section, option) if option_type == 'bool': try: return ConfigParser.getboolean(self, section, option) except NoOptionError: return ConfigParser.getboolean(self, parent_section, option) return None else: raise NameError("Invalid section name: '%(section)s'." % \ {'section': section})
def start(self):
    """Buildbot step entry point: read the l10nbuilds ini section for the
    rendered tree and kick off loading of its l10n.ini files."""
    from scheduler import Tree
    loog = self.addLog("stdio")
    self.pending = 0
    properties = self.build.getProperties()
    self.rendered_tree = tree = properties.render(self.treename)
    l10nbuilds = properties.render(self.l10nbuilds)
    cp = ConfigParser()
    cp.read(l10nbuilds)
    # Per-tree settings: source repo/branch, l10n.ini path, l10n branch.
    repo = cp.get(tree, "repo")
    branch = cp.get(tree, "mozilla")
    path = cp.get(tree, "l10n.ini")
    l10nbranch = cp.get(tree, "l10n")
    locales = cp.get(tree, "locales")
    if locales == "all":
        # Locales will be discovered from the all-locales files.
        alllocales = "yes"
    else:
        # Explicit locale list: publish it as a build property.
        # NOTE(review): indentation reconstructed — the properties.update
        # call is assumed to be inside this else branch; confirm upstream.
        alllocales = "no"
        properties.update({"locales": filter(None, locales.split())}, "Build")
    self.tree = Tree(self.rendered_tree, repo, branch, l10nbranch, path)
    loog.addStdout("Loading l10n.inis for %s\n" % self.rendered_tree)
    logger.debug(
        "scheduler.l10n.tree",
        "Loading l10n.inis for %s, alllocales: %s" % (self.rendered_tree, alllocales)
    )
    self.loadIni(repo, branch, path, alllocales)
def _set_config_all(self, path_to_file):
    """Merge every option from the ini file at *path_to_file* into this
    Trac environment's trac.ini, creating a backup of trac.ini first
    (or asking for confirmation when the env dir is not writable)."""
    out = sys.stdout
    if not os.access(path_to_file, os.R_OK):
        self.log.warning( "cannot access file %s" % path_to_file )
        return
    elif not self.env.config:
        self.log.warning( "cannot access config file trac.ini" )
        return
    cfg = ConfigParser()
    cfg.read(path_to_file)
    if os.access(self.env.path, os.W_OK):
        # Writable environment: back trac.ini up automatically.
        path_to_trac_ini = os.path.join(self.env.path, 'conf', 'trac.ini')
        shutil.copy(path_to_trac_ini, path_to_trac_ini + '.bak')
        out.write( "created a backup of trac.ini to %s.bak" % path_to_trac_ini )
        out.write('\n')
    else:
        # No backup possible: ask the operator whether to proceed.
        out.write( "could not create backup of trac.ini - continue anyway? [y|n] " )
        # Note: `input` shadows the builtin here; kept as-is (doc-only edit).
        input = sys.stdin.readline()
        if not input or not input.strip() == 'y':
            return
    # Copy every option over into the live config, echoing each addition.
    for sect in cfg.sections():
        for opt in cfg.options(sect):
            self.config.set(sect, opt, cfg.get(sect, opt))
            out.write( "added config [%s] %s = %s" % (sect, opt, cfg.get(sect, opt)) )
            out.write('\n')
    self.config.save()
class ConfigReader(object):
    """Config reader for the game project's per-game config files.

    Options are looked up first in the game-specific section and then in
    the shared [common] section, which per-game sections effectively
    inherit from. (Replaces the tightly coupled arg.gameOption helper
    while keeping the same config file layout.)

    Example::

        conf = ConfigReader(game, region)
        ip = conf.get("mobile_www_ip")
        if conf.has_option("mobile_www_port"):
            port = conf.getint("mobile_www_port")
    """

    def __init__(self, game, section, conf_dir='/app/opbin/work/bible/conf'):
        self.game = game
        self.section = section
        # One conf file per game: <conf_dir>/<game>.conf
        self.conf_file = '{}/{}.conf'.format(conf_dir.rstrip('/'), self.game)
        self.config = ConfigParser()
        self.config.read(self.conf_file)
        self.has_section = self.config.has_section(self.section)

    def has_option(self, option):
        """True when *option* exists in the game section or in [common]."""
        return self._has_option(self.section, option) or self._has_option('common', option)

    def _has_option(self, section, option):
        return self.config.has_option(section, option)

    def _find_section(self, option):
        # Resolve which section actually holds *option*, honouring the
        # game-section-overrides-common precedence shared by all getters.
        if self._has_option(self.section, option):
            return self.section
        if self._has_option('common', option):
            return 'common'
        raise Exception("Can't find option: {} in {}".format(option, self.conf_file))

    def get(self, option, raw=0, var=None):
        """Return the raw string value of *option*.

        raw/var are forwarded to ConfigParser.get; passed as keywords so
        this works on Python 3 (where they are keyword-only) as well.
        """
        return self.config.get(self._find_section(option), option,
                               raw=bool(raw), vars=var)

    def getint(self, option):
        """Return *option* coerced to int."""
        return self.config.getint(self._find_section(option), option)

    def getfloat(self, option):
        """Return *option* coerced to float."""
        return self.config.getfloat(self._find_section(option), option)

    def getboolean(self, option):
        """Return *option* coerced to bool (yes/no, true/false, 1/0)."""
        return self.config.getboolean(self._find_section(option), option)
def main(numthreads=10):
    """Fetch the user's VK audio list and download the items with a pool
    of *numthreads* DownloadThread workers, printing the elapsed time."""
    t1 = time.time()
    queue = Queue()
    factory = TokenFactory()
    config = ConfigParser()
    config.read('vk_api.conf')
    # OAuth URL assembled from [api] credentials plus module constants.
    url = API.get_url(
        app_id=config.get('api', 'id'), app_key=config.get('api', 'key'),
        permissions=PERMISSIONS, redirect_uri=URI, display=DISPLAY,
        api_version=VERSION)
    # TODO: check token expiration
    token_pair = factory.get_token_pair()
    if not token_pair:
        # No cached token: run the auth flow and store the result.
        token_pair = factory.store_token_pair(url)
    api = API(token=token_pair[0],user_id=token_pair[1])
    audio = api.audio
    # NOTE(review): `audio.get` is referenced without calling it — this
    # only works if attribute access itself performs the request on the
    # API object; confirm against the API class, otherwise a () is missing.
    data = audio.get
    if data:
        for item in data['response']['items']:
            queue.put(item)
    # Workers consume the queue until it drains.
    for i in range(numthreads):
        t = DownloadThread(queue, FILE_DIR)
        t.start()
    queue.join()
    t2 = time.time()
    print('Time: {0}'.format(t2-t1))
def create_repositories_from_svn_config(self):
    """Build SvnRepoMonitor objects from the sections of the config file.

    Every section must define 'server'; 'user' and 'pass' are optional.
    The process exits when the file cannot be parsed or defines no
    sections at all.
    """
    logging.info("Reading configuration file %s" % self.config_file)
    parser = ConfigParser()
    parser.read(self.config_file)
    repositories = []
    for name in parser.sections():
        try:
            server = parser.get(name, 'server')
        except BaseException as e:
            logging.critical("Error while parsing config file %s\n%s" % (self.config_file, e))
            exit()
        # Optional credentials default to None when absent.
        user = parser.get(name, 'user') if parser.has_option(name, 'user') else None
        password = parser.get(name, 'pass') if parser.has_option(name, 'pass') else None
        repositories.append(SvnRepoMonitor(name, server, user, password,
                                           self.config_file))
        logging.info('Monitoring SVN repository %s (%s)' % (name, server))
    if not repositories:
        logging.error("No sections in configuration file found. Aborting")
        exit()
    return repositories
def getVersionedFolderInfo(dirPath):
    """
    returns a list containing the following information about the asset in dirPath:
    [0] last person to check it out, if locked
    [1] last person to check it in
    [2] time it was last checked in
    [3] latest comment on checkin
    [4] if it is isInstalled
    [5] filepath to install directory
    """
    if not isVersionedFolder(dirPath):
        raise Exception("Not a versioned folder")
    nodeInfo = []
    # Metadata lives in the .nodeInfo ini file inside the folder.
    cp = ConfigParser()
    cp.read(os.path.join(dirPath, ".nodeInfo"))
    # [0]: holder of the lock, or empty string when not checked out.
    if cp.getboolean("Versioning", "locked"):
        nodeInfo.append(cp.get("Versioning", "lastcheckoutuser"))
    else:
        nodeInfo.append("")
    nodeInfo.append(cp.get("Versioning", "lastcheckinuser"))
    nodeInfo.append(cp.get("Versioning", "lastcheckintime"))
    # Comments are keyed by zero-padded version tag, e.g. "v007".
    versionNum = int(cp.get("Versioning", "latestversion"))
    latestVersion = "v"+("%03d" % versionNum)
    if cp.has_section("Comments"):
        nodeInfo.append(cp.get("Comments", latestVersion))
    else:
        nodeInfo.append('')
    if isInstalled(dirPath):
        nodeInfo.append("Yes")
        # First file matching *stable* in the stable/ subfolder.
        # NOTE(review): raises IndexError if no stable file exists —
        # presumably isInstalled() guarantees one; confirm.
        nodeInfo.append(glob.glob(os.path.join(dirPath, 'stable', '*stable*'))[0])
    else:
        nodeInfo.append("No")
        nodeInfo.append("")
    return nodeInfo
def validateSource(self):
    """Validate every human test fixture against the CSL schemas,
    resuming from a position saved by a previously interrupted run."""
    skip_to_pos = 0
    if os.path.exists(self.pickle):
        # Resume support: reuse the pickled position only when the saved
        # options match the current ones.
        upfh = open(self.pickle, "rb")
        unpickler = Unpickler(upfh)
        old_opt,old_pos = unpickler.load()
        if self.opt == old_opt:
            skip_to_pos = old_pos
    # Echo one dot per already-validated file so progress output lines up.
    for i in range(0,skip_to_pos,1):
        sys.stdout.write(".")
    pos = -1
    files = self.files['humans'].keys()
    files.sort()
    # Validator command and schema locations come from test.cnf.
    cp = ConfigParser()
    cp.read(os.path.join(path("config"), "test.cnf"))
    validator_path = cp.get("validation", "validator")
    csl_schema_path = cp.get("validation", "schema")
    cslm_schema_path = cp.get("validation", "schema-m")
    for filename in files:
        pos += 1
        if pos < skip_to_pos:
            continue
        p = self.files['humans'][filename]
        # NOTE(review): passes the module-global `opt`, not `self.opt`,
        # to CslTest — confirm this is intended.
        test = CslTest(opt,p,filename,pos=pos)
        test.parse()
        test.validate(validator_path, csl_schema_path, cslm_schema_path)
    # Full pass completed: the resume marker is no longer needed.
    if os.path.exists( self.pickle ):
        os.unlink(self.pickle)
def __init__(self, data_dir, configFile='glastopf.cfg'):
    """Configure the TAXII logger from the [taxii] section of *configFile*.

    configFile may be a path or an already-loaded ConfigParser instance.
    Sets up the taxii HTTP client (proxy, credentials, auth type) and the
    STIX transformer used to emit events.
    """
    if isinstance(configFile, ConfigParser):
        config = configFile
    else:
        config = ConfigParser()
        config.read(configFile)
    self.options = {'enabled': config.getboolean('taxii', 'enabled')}
    self.host = config.get('taxii', 'host')
    self.port = config.getint('taxii', 'port')
    self.inbox_path = config.get('taxii', 'inbox_path')
    self.use_https = config.getboolean('taxii', 'use_https')
    self.client = HttpClient()
    self.client.setProxy('noproxy')
    auth_credentials = {'username': config.get('taxii', 'auth_basic_username'),
                        'password': config.get('taxii', 'auth_basic_password'),
                        'key_file': config.get('taxii', 'auth_certificate_keyfile'),
                        'cert_file': config.get('taxii', 'auth_certificate_certfile')}
    self.client.setAuthCredentials(auth_credentials)
    use_basic = config.getboolean('taxii', 'use_auth_basic')
    use_cert = config.getboolean('taxii', 'use_auth_certificate')
    # Bug fix: the combined basic+certificate case used to be tested in a
    # final elif, after each flag had already matched alone, so
    # AUTH_CERT_BASIC was unreachable. Check the combination first.
    if use_basic and use_cert:
        self.client.setAuthType(tc.HttpClient.AUTH_CERT_BASIC)
    elif use_basic:
        self.client.setAuthType(tc.HttpClient.AUTH_BASIC)
    elif use_cert:
        self.client.setAuthType(tc.HttpClient.AUTH_CERT)
    else:
        self.client.setAuthType(tc.HttpClient.AUTH_NONE)
    self.stix_transformer = StixTransformer(config, data_dir)
def __init__(self, config_filename):
    """Load printer and product configuration for label generation.

    Reads the main config (printer name and the path to the product info
    file, option names in Portuguese), then the product info file itself
    (categories, prices, label templates).
    """
    locale.setlocale(locale.LC_ALL, '')
    assert os.path.isfile(config_filename), "Config file not found"
    local_config_parser = ConfigParser()
    local_config_parser.read(config_filename)
    # info_produtos = product info file path, impressora = printer name.
    product_info_filename = local_config_parser.get("Config", "info_produtos")
    self._printer_name = local_config_parser.get("Config", "impressora")
    assert os.path.isfile(product_info_filename), "Product info file not found"
    # Set barcode filename
    self._barcode_filename = os.path.join(
        os.path.dirname(product_info_filename),
        "barcode"
    )
    cfg_parser = ConfigParser()
    cfg_parser.read(product_info_filename)
    self._primary_categories = dict(cfg_parser.items(self.PRIMARY_CATEGORY_SEC))
    self._secondary_categories = dict(cfg_parser.items(self.SECONDARY_CATEGORY_SEC))
    # Prices: options of the price section in sorted order, or defaults.
    if cfg_parser.has_section(self.PRICE_SEC):
        self.price_list = []
        for opt in sorted(cfg_parser.options(self.PRICE_SEC)):
            self.price_list.append(cfg_parser.getfloat(self.PRICE_SEC, opt))
    else:
        self.price_list = [1.7, 2.21]
    # Literal "\n" sequences in the header become real newlines.
    self._label_header = cfg_parser.get("Label", "header").replace("\\n","\n")
    self._label_template = cfg_parser.get("Label", "label")
    self._labels_per_file = 30
    self._product_unity = "pç"
    self._category_on_label = cfg_parser.getint("Geral", "cat_etiqueta")
def runTests(self,bundle=False):
    """Run the JS test suite under the engine named in self.opt.engine,
    streaming the runner's stdout through fixEndings to ours."""
    cp = ConfigParser()
    cp.read(os.path.join(path("config"), "test.cnf"))
    # Engine command lines live in per-engine sections of test.cnf;
    # rhino is the fallback.
    if self.opt.engine == "mozjs":
        engine = cp.get("mozjs", "command")
        nick = "mozjs"
    elif self.opt.engine == "v8":
        engine = cp.get("v8", "command")
        nick = "v8"
    elif self.opt.engine == "jsc":
        engine = cp.get("jsc", "command")
        nick = "jsc"
    else:
        engine = cp.get("rhino","command")
        nick = "rhino"
    # Bundled runs use the "<engine>-bundled.js" runner variant.
    bundleext = ""
    if bundle:
        bundleext = "-bundled"
    runpath = os.path.join(path("runners"), "%s%s.js" %(nick,bundleext))
    command = "%s %s" % (engine,runpath)
    # NOTE(review): shell=True with a config-sourced command string —
    # acceptable for a local test harness, unsafe with untrusted config.
    ifh = sub.Popen(command,shell=True, stdout=sub.PIPE).stdout
    # Relay the runner's output line by line until EOF.
    while 1:
        line = ifh.readline()
        if not line:
            break
        line = fixEndings(line)
        sys.stdout.write(line)
def read_configuration(configuration_file):
    """Load bot settings from *configuration_file*.

    Returns a YachBotConfiguration built from the [bot] section, or None
    (implicitly) when the file cannot be read.
    """
    from ConfigParser import ConfigParser
    parser = ConfigParser()
    # ConfigParser.read returns the list of files successfully parsed.
    loaded = parser.read([configuration_file])
    if configuration_file not in loaded:
        return None
    return YachBotConfiguration(parser.get("bot", "db_dir"),
                                parser.get("bot", "telegram_token"))
def loadConfig(self):
    """Populate name, description and project from the [Info] section of
    data/config.cfg, located next to this module."""
    parser = ConfigParser()
    cfg_path = os.path.join(os.path.dirname(__file__), "data", "config.cfg")
    parser.read(cfg_path)
    # Feed each [Info] option into its corresponding setter.
    for setter, option in ((self.setName, "name"),
                           (self.setDescription, "description"),
                           (self.setProject, "project")):
        setter(parser.get("Info", option))
def _get_config():
    """Read configuration options from the ``jenkins_jobs.ini`` config file.

    Parse the ``jenkins_jobs.ini`` configuration file and return a dict
    in the following form::

        {
            'auth': ('username', 'password'),
            'url': …,
        }
    """
    config = ConfigParser()
    # readfp was renamed read_file in Python 3.
    reader = config.readfp if version_info.major == 2 else config.read_file  # noqa pylint:disable=no-member
    ini_path = os.path.join(os.path.dirname(__file__), os.pardir,
                            'jenkins_jobs.ini')
    with open(ini_path) as handle:
        reader(handle)
    if version_info.major == 2:
        # Python 2: classic get() API.
        return {
            'auth': (
                config.get('jenkins', 'user'),
                config.get('jenkins', 'password')
            ),
            'url': config.get('jenkins', 'url'),
        }
    # Python 3: mapping-style access.
    jenkins = config['jenkins']
    return {
        'auth': (jenkins['user'], jenkins['password']),
        'url': jenkins['url'],
    }
def get_ws_call(action, payload, uid):
    """
    This function builds the url for the outgoing call to the different errata ws.
    :param payload: payload to be posted
    :param action: one of the 4 actions: create, update, close, retrieve
    :param uid: in case of a retrieve call, uid is needed
    :return: requests call
    """
    # Client endpoints are configured in $ISSUE_CLIENT_HOME/esgf-client.ini.
    config = ConfigParser()
    config.read(os.path.join(os.getenv('ISSUE_CLIENT_HOME'), 'esgf-client.ini'))
    if action not in ACTIONS:
        logging.error('Unrecognized command, refer to the docs for help or use -h, error code: {}.'.format(6))
        sys.exit(1)
    # Full URL = base + per-action path, both from the webservice section.
    url = config.get(WEBSERVICE, URL_BASE)+config.get(WEBSERVICE, action)
    # create/update POST the payload; close POSTs to url+uid;
    # retrieve GETs url+uid; anything else GETs the bare url.
    if action in [CREATE, UPDATE]:
        r = requests.post(url, json.dumps(payload), headers=HEADERS)
    elif action == CLOSE:
        r = requests.post(url+uid)
    elif action == RETRIEVE:
        r = requests.get(url+uid)
    else:
        r = requests.get(url)
    if r.status_code != requests.codes.ok:
        logging.error('Errata WS call has failed, please refer to the error text for further information: {0}'
                      ', error code: {1}'.format(r.text, 5))
        sys.exit(1)
    return r
def db_settings(backend=None):
    """Return connection settings for *backend* from db_settings.ini.

    The returned dict holds backend, user, password and database keys,
    plus host for the PostgreSQL backend. When *backend* is None the
    DATABASE environment variable is consulted, falling back to
    'postgresql'.
    """
    # Equivalent chain of defaults: explicit arg > env var > postgresql.
    backend = backend or os.getenv("DATABASE") or "postgresql"
    config = ConfigParser()
    # The ini file lives next to this module.
    config.read(os.path.dirname(os.path.abspath(__file__)) + "/db_settings.ini")
    settings = {
        'backend': backend,
        'user': config.get(backend, 'user'),
        'password': config.get(backend, 'password'),
        'database': config.get(backend, 'database'),
    }
    if backend == 'postgresql':
        settings['host'] = config.get(backend, 'host')
    return settings
def __init__(self, app, conf):
    """Middleware init: resolve memcache servers and serialization policy.

    Precedence: middleware conf -> memcache.conf in the swift config dir
    -> hard defaults (127.0.0.1:11211, serialization format 2).
    """
    self.app = app
    self.memcache_servers = conf.get("memcache_servers")
    serialization_format = conf.get("memcache_serialization_support")
    if not self.memcache_servers or serialization_format is None:
        # Something is missing from the middleware conf: fall back to
        # memcache.conf for whichever value is unset.
        path = os.path.join(conf.get("swift_dir", "/etc/swift"),
                            "memcache.conf")
        memcache_conf = ConfigParser()
        if memcache_conf.read(path):
            if not self.memcache_servers:
                try:
                    self.memcache_servers = memcache_conf.get("memcache",
                                                              "memcache_servers")
                except (NoSectionError, NoOptionError):
                    pass
            if serialization_format is None:
                try:
                    serialization_format = memcache_conf.get("memcache",
                                                             "memcache_serialization_support")
                except (NoSectionError, NoOptionError):
                    pass
    # Hard defaults when neither source provided a value.
    if not self.memcache_servers:
        self.memcache_servers = "127.0.0.1:11211"
    if serialization_format is None:
        serialization_format = 2
    else:
        serialization_format = int(serialization_format)
    # Formats: 0 = pickle read+write, 1 = read pickle only, 2+ = json only.
    self.memcache = MemcacheRing(
        [s.strip() for s in self.memcache_servers.split(",") if s.strip()],
        allow_pickle=(serialization_format == 0),
        allow_unpickle=(serialization_format <= 1),
    )
def _populate_config_from_old_location(self, conf):
    """Backfill SLO rate-limit/timeout settings from the proxy-server
    app's section of the paste config when the middleware conf does not
    set any of them."""
    if ('rate_limit_after_segment' in conf or
            'rate_limit_segments_per_sec' in conf or
            'max_get_time' in conf or
            '__file__' not in conf):
        # Middleware conf already configures this (or there is no paste
        # file to read from): nothing to do.
        return
    cp = ConfigParser()
    if os.path.isdir(conf['__file__']):
        # Conf dir layout: merge every file in the directory.
        read_conf_dir(cp, conf['__file__'])
    else:
        cp.read(conf['__file__'])
    try:
        pipe = cp.get("pipeline:main", "pipeline")
    except (NoSectionError, NoOptionError):
        return
    # Last pipeline entry is the proxy app; its section is app:<name>.
    proxy_name = pipe.rsplit(None, 1)[-1]
    proxy_section = "app:" + proxy_name
    for setting in ('rate_limit_after_segment',
                    'rate_limit_segments_per_sec',
                    'max_get_time'):
        try:
            conf[setting] = cp.get(proxy_section, setting)
        except (NoSectionError, NoOptionError):
            # Setting absent in the proxy app too: keep the default.
            pass
class Config(object):
    """Wrapper around the user's ini file, exposing a [config] section
    (fetch_command, player_command, media_dir) and a [podcast] section
    as attributes via ``__getattr__``.

    A template config is written and MarrieError raised when the file
    does not exist yet.
    """

    # Options read with raw interpolation (may contain % placeholders).
    _raw_options = ("fetch_command", "player_command")
    # Options read with normal interpolation.
    _options = ("media_dir",)
    # Options run through os.path.expanduser before being returned.
    _expanduser = ("media_dir",)

    def __init__(self, my_file):
        my_file = os.path.expanduser(my_file)
        if not os.path.exists(my_file):
            # First run: write the template (module global `config_file`)
            # and tell the user to edit it.
            with codecs.open(my_file, "w", encoding="utf-8") as fp:
                fp.write(config_file)
            raise MarrieError("Missing config file: %s. It will be created for you." % my_file)
        self._cp = ConfigParser()
        self._cp.read(my_file)
        # All [config] options are mandatory.
        for opt in self._raw_options + self._options:
            if not self._cp.has_option("config", opt):
                raise MarrieError("Missing needed config option: config:%s" % opt)

    def __getattr__(self, attr):
        """Resolve a known option name to its value; anything else
        raises AttributeError as usual."""
        opt = None
        if attr in self._raw_options:
            # Positional third argument is Python 2 ConfigParser's
            # `raw` flag: skip %-interpolation for command templates.
            opt = self._cp.get("config", attr, True)
        elif attr in self._options:
            opt = self._cp.get("config", attr)
        elif attr == "podcast":
            # Whole [podcast] section as an ordered name -> url mapping.
            opt = OrderedDict(self._cp.items("podcast"))
        if opt is None:
            raise AttributeError(attr)
        if attr in self._expanduser and not isinstance(opt, dict):
            return os.path.expanduser(opt)
        return opt
class NightscoutConfig(object):
    """Persisted menubar settings, stored as an ini file in the app's
    Application Support directory and written back on every change."""

    FILENAME = 'config'
    SECTION = 'NightscoutMenubar'
    HOST = 'nightscout_host'
    USE_MMOL = 'use_mmol'

    def __init__(self, app_name):
        self.config_path = os.path.join(rumps.application_support(app_name), self.FILENAME)
        self.config = ConfigParser()
        self.config.read([self.config_path])
        # Seed the section/options with defaults on first run; the
        # setters below also persist the file.
        if not self.config.has_section(self.SECTION):
            self.config.add_section(self.SECTION)
        if not self.config.has_option(self.SECTION, self.HOST):
            self.set_host('')
        if not self.config.has_option(self.SECTION, self.USE_MMOL):
            self.set_use_mmol(False)

    def get_host(self):
        return self.config.get(self.SECTION, self.HOST)

    def set_host(self, host):
        self.config.set(self.SECTION, self.HOST, host)
        # Persist immediately so settings survive restarts.
        with open(self.config_path, 'w') as f:
            self.config.write(f)

    def get_use_mmol(self):
        # The flag is stored as 'true' or '' (see set_use_mmol), so the
        # truthiness of the raw string is the boolean. NOTE(review): any
        # hand-edited non-empty value (even 'false') also reads as True.
        return bool(self.config.get(self.SECTION, self.USE_MMOL))

    def set_use_mmol(self, mmol):
        self.config.set(self.SECTION, self.USE_MMOL, 'true' if mmol else '')
        # Persist immediately so settings survive restarts.
        with open(self.config_path, 'w') as f:
            self.config.write(f)
def loadini(struct, configfile):
    """Load ini and store in struct"""
    config_path = os.path.expanduser(configfile)
    config = ConfigParser()
    # Seed defaults first so missing options still resolve.
    defaults = {
        "general": {
            "host": "irc.quakenet.org",
            "port": 6667,
            "channels": "#scibbytest",
            "nickname": "scabby",
            "plugins_directory": "~/dev/src/scibby-plugins",
        },
    }
    fill_config_with_default_values(config, defaults)
    config.read(config_path)
    # Copy the [general] options onto the struct, with type conversions.
    struct.host = config.get("general", "host")
    struct.port = config.getint("general", "port")
    struct.channels = config.get("general", "channels").split(",")
    struct.nickname = config.get("general", "nickname")
    struct.plugins_directory = config.get("general", "plugins_directory")
    return struct
def __init__(self, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
    """Initialise the daemon: load mong.conf, connect the database and
    stash the shared objects on the instance's data dict.

    stdin/stdout/stderr are the stream paths used when daemonising.
    """
    self.data = {}
    self.stdin = stdin
    self.stdout = stdout
    self.stderr = stderr
    # Configuration import — close the handle instead of leaking it.
    config = ConfigParser()
    conf_fp = open(CURRENT_DIR + "include/" + "mong.conf", "rb")
    try:
        config.readfp(conf_fp)
    finally:
        conf_fp.close()
    # Database initialization
    host = config.get("Database", "host")
    user = config.get("Database", "user")
    passwd = config.get("Database", "passwd")
    dbname = config.get("Database", "dbname")
    db = dbApi(host, user, passwd, dbname)
    # Pid file used for daemon management.
    pidfile = config.get("Server", "pidfile")
    self.pidfile = pidfile
    # Telnet server port (config values are strings).
    port = config.get("Server", "port")
    self.port = int(port)
    # Shared state consumed by the server handlers.
    self.data["database"] = db
    self.data["config"] = config
    self.data["pidfile"] = pidfile
    self.data["port"] = port  # note: kept as a string, unlike self.port
class baseInfo(object):
    """init the info of API, and get the token for access the api"""

    def __init__(self, token=None):
        # Default request headers for all JSON API calls.
        headers = {}
        headers["Content-Type"] = "application/json"
        self.headers = headers
        self.cf = ConfigParser()
        self.cf.read(confFile)
        self.conf = self.getConf()
        self.catalog, self.token = self.getToken()
        # Pick the ceilometer endpoint out of the service catalog.
        self.url = [url for url in self.catalog if url["name"] == "ceilometer"]
        self.url = self.url[0]["endpoints"][0]["publicURL"]

    def getConf(self):
        """Read keystone credentials from the [ser] section of the config."""
        try:
            conf = {
                "url": self.cf.get("ser", "OS_AUTH_URL"),
                "uname": self.cf.get("ser", "OS_USERNAME"),
                "passwd": self.cf.get("ser", "OS_PASSWORD"),
                "tname": self.cf.get("ser", "OS_TENANT_NAME")
            }
        except Exception as e:
            # Log text (Chinese): "failed to load the config file".
            logging.critical("加载配置文件失败")
            logging.critical(e)
        # NOTE(review): if the lookup above failed, `conf` is unbound here
        # and this raises UnboundLocalError — confirm intended handling.
        return conf

    def getToken(self):
        """POST credentials to keystone /tokens; return (catalog, token id)."""
        headers = self.headers
        url = self.conf["url"] + "/tokens"
        # NOTE(review): the credential fields appear masked ("******") in
        # this source; the original template presumably used %s
        # placeholders for username and password as well.
        data = '{"auth": {"tenantName": "%s", "passwordCredentials": {"username": "******", "password": "******"}}}'
        data = data % (self.conf["tname"], self.conf["uname"], self.conf["passwd"])
        try:
            # Log text (Chinese): "starting to fetch the token".
            logging.debug("开始获取Token")
            ret = requests.post(url, data=data, headers=headers)
            #print ret.url
            logging.debug("request url:%s" % ret.url)
            ret = ret.json()
        except Exception as e:
            # Log text (Chinese): "failed to get the token".
            msg = "获取Token失败 data:%s headers:%s" % (data, headers)
            logging.critical(msg)
            logging.critical(e)
        catalog = ret["access"]["serviceCatalog"]
        token = ret["access"]["token"]["id"]
        return catalog, token

    def getCResp(self, suffix, method, data=None, headers=None, params=None, isjson=True):
        """return the result of ceilometer response"""
        url = self.url + suffix
        if headers == None:
            # Default headers plus the auth token.
            headers = self.headers.copy()
            headers["X-Auth-Token"] = self.token
        # Dispatch to requests.get/post/... by method name.
        req = getattr(requests, method)
        try:
            ret = req(url, data=data, headers=headers, params=params, verify=False)
            #print ret.url
            logging.debug("request url:%s" % ret.url)
        except Exception as e:
            # Log text (Chinese): "<method> request to <suffix> failed".
            msg = "%s访问%s失败 data:%s headers:%s" % (method, suffix, data, headers)
            logging.critical(msg)
            logging.critical(e)
            sys.exit(1)
        # 401: token expired — refresh once and retry the request.
        if ret.status_code == 401:
            self.catalog, self.token = self.getToken()
            headers["X-Auth-Token"] = self.token
            ret = req(url, data=data, headers=headers)
        if isjson:
            ret = ret.json()
        return ret
# EP_DB_PASS=<password to the database> # EP_DB_DB=<name of database> # EP_SERVICE_PORT=<port on which the app is running> env_config = {key[3:].lower(): val for key, val in os.environ.items() if key.startswith('EP_')} for k, v in env_config.items(): keyparts = k.split('_') sectionname = keyparts[0] varname = '_'.join(keyparts[1:]) try: cfg.set(sectionname, varname, v) except NoSectionError: cfg.add_section(sectionname) cfg.set(sectionname, varname, v) settings = dict( debug=True, template_path='templates/' ) application = tornado.web.Application([ (r"/", IndexHandler), (r"/query", MainHandler, dict(config=cfg)), (r"/dcat", DcatHandler, dict(config=cfg)), (r"/stationsminmax", StationMinMaxHandler, dict(config=cfg)), (r"/stations", StationHandler, dict(config=cfg)), ], **settings) application.listen(cfg.get('service', 'port')) tornado.ioloop.IOLoop.current().start()
def main():
    """Command-line entry point for the Planet feed aggregator (Python 2).

    Parses -v/-o/-h options plus an optional config-file argument, reads the
    [Planet] configuration section, then runs the aggregator and generates
    the output files.
    """
    config_file = CONFIG_FILE
    offline = 0
    verbose = 0

    # Minimal hand-rolled option parsing; any non-option argument is taken
    # as the configuration file path.
    for arg in sys.argv[1:]:
        if arg == "-h" or arg == "--help":
            print "Usage: planet [options] [CONFIGFILE]"
            print
            print "Options:"
            print " -v, --verbose DEBUG level logging during update"
            print " -o, --offline Update the Planet from the cache only"
            print " -h, --help Display this help message and exit"
            print
            sys.exit(0)
        elif arg == "-v" or arg == "--verbose":
            verbose = 1
        elif arg == "-o" or arg == "--offline":
            offline = 1
        elif arg.startswith("-"):
            print >> sys.stderr, "Unknown option:", arg
            sys.exit(1)
        else:
            config_file = arg

    # Read the configuration file
    config = ConfigParser()
    config.read(config_file)
    if not config.has_section("Planet"):
        print >> sys.stderr, "Configuration missing [Planet] section."
        sys.exit(1)

    # Read the [Planet] config section; config_get falls back to the
    # module-level defaults when an option is absent.
    planet_name = config_get(config, "Planet", "name", PLANET_NAME)
    planet_link = config_get(config, "Planet", "link", PLANET_LINK)
    planet_feed = config_get(config, "Planet", "feed", PLANET_FEED)
    owner_name = config_get(config, "Planet", "owner_name", OWNER_NAME)
    owner_email = config_get(config, "Planet", "owner_email", OWNER_EMAIL)
    if verbose:
        log_level = "DEBUG"
    else:
        log_level = config_get(config, "Planet", "log_level", LOG_LEVEL)
    feed_timeout = config_get(config, "Planet", "feed_timeout", FEED_TIMEOUT)
    template_files = config_get(config, "Planet", "template_files",
                                TEMPLATE_FILES).split(" ")

    # Default feed to the first feed for which there is a template
    if not planet_feed:
        for template_file in template_files:
            name = os.path.splitext(os.path.basename(template_file))[0]
            if name.find('atom') >= 0 or name.find('rss') >= 0:
                planet_feed = urlparse.urljoin(planet_link, name)
                break

    # Define locale
    if config.has_option("Planet", "locale"):
        # The user can specify more than one locale (separated by ":") as
        # fallbacks.
        locale_ok = False
        for user_locale in config.get("Planet", "locale").split(':'):
            user_locale = user_locale.strip()
            try:
                locale.setlocale(locale.LC_ALL, user_locale)
            except locale.Error:
                pass
            else:
                locale_ok = True
                break
        if not locale_ok:
            print >> sys.stderr, "Unsupported locale setting."
            sys.exit(1)

    # Activate logging
    planet.logging.basicConfig()
    planet.logging.getLogger().setLevel(planet.logging.getLevelName(log_level))
    log = planet.logging.getLogger("planet.runner")
    # Older logging modules only provide warn(); alias it to warning().
    try:
        log.warning
    except:
        log.warning = log.warn

    # timeoutsocket allows feedparser to time out rather than hang forever on
    # ultra-slow servers. Python 2.3 now has this functionality available in
    # the standard socket library, so under 2.3 you don't need to install
    # anything. But you probably should anyway, because the socket module is
    # buggy and timeoutsocket is better.
    if feed_timeout:
        try:
            feed_timeout = float(feed_timeout)
        except:
            log.warning("Feed timeout set to invalid value '%s', skipping",
                        feed_timeout)
            feed_timeout = None
    '''
    if feed_timeout and not offline:
        try:
            from planet import timeoutsocket
            timeoutsocket.setDefaultSocketTimeout(feed_timeout)
            log.debug("Socket timeout set to %d seconds", feed_timeout)
        except ImportError:
            import socket
            if hasattr(socket, 'setdefaulttimeout'):
                log.debug("timeoutsocket not found, using python function")
                socket.setdefaulttimeout(feed_timeout)
                log.debug("Socket timeout set to %d seconds", feed_timeout)
            else:
                log.error("Unable to set timeout to %d seconds", feed_timeout)
    '''

    # run the planet
    my_planet = planet.Planet(config)
    my_planet.run(planet_name, planet_link, template_files, offline)
    my_planet.generate_all_files(template_files, planet_name, planet_link,
                                 planet_feed, owner_name, owner_email)
class ChiLinConfig(ConfigParser):
    """Pipeline configuration wrapper around an inner ConfigParser.

    Resolves treatment/control raw-data paths and their per-replicate
    target paths, in both single-end (SE) and pair-end (PE) modes.
    """

    def __init__(self, conf, args):
        ConfigParser.__init__(self)
        self._verbose_level = 1
        # All reads/writes are delegated to this inner parser, not to the
        # ConfigParser base class this object inherits from.
        self._conf = ConfigParser()
        if not os.path.exists(conf):
            raise IOError("No such config file: %s" % repr(conf))
        self._conf.read(conf)
        self.root_dir = os.path.dirname(conf)
        # Goes through the `pe` property setter defined below.
        self.pe = args.pe
        self.long = False

    def write(self, fileobj):
        # Serialize the inner parser (overrides ConfigParser.write).
        self._conf.write(fileobj)

    def set_option(self, verbose_level=1):
        """Set verbosity.

        :type verbose_level: int
        verbose_level: 1 - only show fatal errors (quiet mode)
                       2 - show fatal errors and warnings (normal mode)
                       3 - show workflow details (verbose mode)
                       4 - debug mode (debug mode)
        """
        self._verbose_level = verbose_level

    def get(self, section, option, default=None):
        # NOTE(review): `if default:` treats falsy defaults ("" / 0) as
        # "no default" and re-raises — presumably `default is not None`
        # was intended; confirm before changing.
        try:
            return self._conf.get(section, option)
        except NoOptionError:
            if default:
                return default
            else:
                raise

    @property
    def treatment_pairs_pe(self):
        # PE mode: pair each raw replicate with its [pair1, pair2] targets.
        return list(
            zip(self.treatment_raws, self.treatment_pair_targets["pairs"]))

    def set(self, section, option, value):
        # NOTE(review): ConfigParser.set raises NoSectionError for a missing
        # section, not NoOptionError, so this handler likely never fires.
        try:
            self._conf.set(section, option, value)
        except NoOptionError:
            raise

    def get_path(self, section, option):
        # Convenience: fetch an option and absolutize it.
        return self.to_abs_path(self.get(section, option))

    def items(self, section):
        # NOTE(review): on success this returns ConfigParser's list of
        # (key, value) pairs, but on a missing section it returns {} —
        # callers must tolerate both shapes.
        try:
            return self._conf.items(section)
        except NoSectionError:
            if self._verbose_level >= 2:
                print("Warning: No such section: ", section)
                print("This will return a empty dict")
            return {}

    @property
    def id(self):
        return self.get("basics", "id")

    @property
    def target_dir(self):
        return self.get("basics", "output")

    @property
    def prefix(self):
        return os.path.join(self.target_dir, self.id)

    @property
    def json_prefix(self):
        return os.path.join(self.category("json"), self.id)

    @property
    def latex_prefix(self):
        return os.path.join(self.category("latex"), self.id)

    @property
    def treatment_pairs(self):
        """
        one to one in single end mode, original, target
        two to one in pair end mode [original_pair1, original_pair2], target
        """
        if not self.pe:
            return list(zip(self.treatment_raws, self.treatment_targets))
        else:
            return self.treatment_pairs_pe

    @property
    def control_pairs(self):
        if not self.pe:
            return list(zip(self.control_raws, self.control_targets))
        else:
            return self.control_pairs_pe

    @property
    def sample_pairs(self):
        return self.treatment_pairs + self.control_pairs

    def to_abs_path(self, path):
        # Resolve relative paths against the current working directory.
        abs_path = path
        if not os.path.isabs(path):
            #abs_path = os.path.join(self.root_dir, abs_path)
            abs_path = os.path.abspath(abs_path)
        return abs_path

    @property
    def control_raws(self):
        # SE: comma-separated replicates.  PE: ";" separates replicates,
        # "," separates the two pair files.  Empty config -> [].
        if self.get("basics", "cont").strip():
            if not self.pe:
                return [
                    self.to_abs_path(i.strip())
                    for i in self.get("basics", "cont").split(",")
                ]
            else:
                data_list = []
                for i in self.get("basics", "cont").split(";"):
                    data_list.append(
                        [self.to_abs_path(j.strip()) for j in i.split(",")])
                return data_list
        return []

    @property
    def treatment_raws(self):
        """
        single end data separate by , for replicates
        pair end data separate by ; for replicates , for pairs
        """
        if self.get("basics", "treat"):
            if not self.pe:
                return [
                    self.to_abs_path(i.strip())
                    for i in self.get("basics", "treat").split(",")
                ]
            else:
                data_list = []
                for i in self.get("basics", "treat").split(";"):
                    data_list.append(
                        [self.to_abs_path(j.strip()) for j in i.split(",")])
                return data_list
        else:
            raise NoTreatmentData

    # previous interface only for SE
    # @property
    # def treatment_raws(self):
    #     if self.get("basics", "treat").strip():
    #         return [self.to_abs_path(i.strip()) for i in self.get("basics", "treat").split(",")]
    #     else:
    #         raise NoTreatmentData

    # previous interface only for SE
    # @property
    # def treatment_targets(self):
    #     return [os.path.join(self.target_dir,
    #                          self.id + "_treat_rep" + str(num+1)
    #                          ) for num in range(len(self.treatment_raws))]

    @property
    def pe(self):
        return self._pe

    @pe.setter
    def pe(self, value):
        '''setting Pair End state, True for PE, False for SE
        '''
        self._pe = value

    @property
    def treatment_targets(self):
        if not self.pe:
            return self.treatment_single_targets
        else:
            return self.treatment_pair_targets["reps"]

    @property
    def control_pairs_pe(self):
        return list(zip(self.control_raws,
                        self.control_pair_targets["pairs"]))

    @property
    def treatment_pair_data(self):
        return self.treatment_pair_targets["pairs"]

    @property
    def treatment_single_targets(self):
        # One "<id>_treat_repN" path per raw replicate.
        return [
            os.path.join(self.target_dir,
                         self.id + "_treat_rep" + str(num + 1))
            for num in range(len(self.treatment_raws))
        ]

    @property
    def treatment_pair_targets(self):
        '''pairs: for [[rep1_pair1, rep1_pair2]], usually for evaluating
        read quality
        reps: for [rep1, rep2], usually for mapping pair end data
        '''
        return {
            "pairs": [[
                os.path.join(self.target_dir,
                             self.id + "_treat_rep" + str(num + 1)) + "pair1",
                os.path.join(self.target_dir,
                             self.id + "_treat_rep" + str(num + 1)) + "pair2"
            ] for num in range(len(self.treatment_raws))],
            "reps": [
                os.path.join(self.target_dir,
                             self.id + "_treat_rep" + str(num + 1))
                for num in range(len(self.treatment_raws))
            ]
        }

    @property
    def control_pair_targets(self):
        '''pairs: for [[rep1_pair1, rep1_pair2]], usually for evaluating
        read quality
        reps: for [rep1, rep2], usually for mapping pair end data
        '''
        return {
            "pairs": [[
                os.path.join(self.target_dir,
                             self.id + "_control_rep" + str(num + 1)) + "pair1",
                os.path.join(self.target_dir,
                             self.id + "_control_rep" + str(num + 1)) + "pair2"
            ] for num in range(len(self.control_raws))],
            "reps": [
                os.path.join(self.target_dir,
                             self.id + "_control_rep" + str(num + 1))
                for num in range(len(self.control_raws))
            ]
        }

    @property
    def control_targets(self):
        if not self.pe:
            return self.control_single_targets
        else:
            return self.control_pair_targets["reps"]

    @property
    def control_single_targets(self):
        return [
            os.path.join(self.target_dir,
                         self.id + "_control_rep" + str(num + 1))
            for num in range(len(self.control_raws))
        ]

    @property
    def sample_targets(self):
        return self.treatment_targets + self.control_targets

    def _base(self, path):
        return os.path.basename(path)

    @property
    def treatment_bases(self):
        return [self._base(i) for i in self.treatment_targets]

    @property
    def control_bases(self):
        return [self._base(i) for i in self.control_targets]

    @property
    def sample_bases(self):
        return [self._base(i) for i in self.sample_targets]

    def category(self, category_name):
        # Return (and create on demand) a sub-directory of the output dir.
        target_path = os.path.join(self.target_dir, category_name)
        if not os.path.exists(target_path):
            os.makedirs(target_path)
        return target_path

    @property
    def log(self):
        # NOTE(review): every access adds a new FileHandler to the root
        # logger, so repeated use duplicates log lines.
        log_path = os.path.join(self.target_dir, 'log')
        if not os.path.exists(log_path):
            os.makedirs(log_path)
        logger = logging.getLogger()
        handler = logging.FileHandler(os.path.join(log_path, self.id + '.log'))
        logger.addHandler(handler)
        logger.setLevel(logging.NOTSET)
        return logger
def important_files(self):
    """Yield a file_info entry for every file that belongs to this app:
    the ini/png meta files, docker control scripts, the logo referenced by
    the ini, and all LICENSE_AGREEMENT*/README* variants."""
    # Adding "special ini and png file
    for special_file in ['ini', 'png']:
        # Dispatch to get_ini_file/get_ini_url etc. by name.
        get_file_method = getattr(self, 'get_%s_file' % special_file.lower())
        get_url_method = getattr(self, 'get_%s_url' % special_file.lower())
        filename = get_file_method()
        url = get_url_method()
        if os.path.exists(filename):
            yield self.file_info(special_file, url, filename)
    # Adding files for docker
    for docker_file in [
            'attributes', 'settings', 'configure', 'configure_host',
            'setup', 'store_data', 'restore_data_before_setup',
            'restore_data_after_setup', 'update_available',
            'update_packages', 'update_release', 'update_app_version',
            'univention-config-registry-variables', 'schema', 'preinst',
            'inst', 'init', 'prerm', 'uinst', 'env',
    ]:
        for filename in glob(self._components_dir(docker_file)):
            basename = os.path.basename(filename)
            url = self._repository_url(basename)
            yield self.file_info(basename, url, filename)
    # Adding logo file
    config = ConfigParser()
    config.read(self.get_ini_file())
    if config.has_option('Application', 'Logo'):
        basename = config.get('Application', 'Logo')
        filename = self._meta_inf_dir(basename)
        url = self._meta_url(basename)
        yield self.file_info(basename, url, filename)
    # Adding LICENSE_AGREEMENT and localised versions like LICENSE_AGREEMENT_DE
    for readme_filename in glob(
            self._components_dir('LICENSE_AGREEMENT*')):
        basename = os.path.basename(readme_filename)
        url = self._repository_url(basename)
        yield self.file_info(basename, url, readme_filename)
    # Adding README, README_UPDATE, README_INSTALL, REAME_POST_UPDATE, README_POST_INSTALL
    # and all the localised versions like README_DE and README_POST_INSTALL_EN (and even *_FR)
    for readme_filename in glob(self._components_dir('README*')):
        basename = os.path.basename(readme_filename)
        url = self._repository_url(basename)
        yield self.file_info(basename, url, readme_filename)
def _copy_meta_files(self, component_id, meta_inf_dir, repo_dir, args):
    """Populate the local app-center meta-inf and repository directories
    from the command-line arguments: the ini file, logos, screenshots,
    thumbnails, readmes and the various control scripts.

    Returns the English App object parsed from the ini file, or None if
    no ini file is available.
    """
    ini_file = os.path.join(meta_inf_dir, '%s.ini' % component_id)
    # --clear wipes any previous repository dir and ini file first.
    if args.clear:
        if os.path.exists(repo_dir):
            rmdir(repo_dir)
        if os.path.exists(ini_file):
            os.unlink(ini_file)
    if args.ini:
        self.copy_file(args.ini, ini_file)
    if not os.path.exists(ini_file):
        self.warn('Stopping here due to no ini file')
        return
    # Parse both localisations; a failed parse aborts the run.
    app_en = App.from_ini(ini_file, 'en')
    app_de = App.from_ini(ini_file, 'de')
    if args.clear:
        self._build_repo_dir(app_en, component_id, args.path, args.ucs_version)
    if not app_en or not app_de:
        raise LocalAppCenterError('Cannot continue with flawed ini file')
    if args.logo:
        # Since UCS 4.1 the logo filename is taken from the ini file;
        # before that it is always <component_id>.png.
        if LooseVersion(args.ucs_version) >= '4.1':
            parser = ConfigParser()
            parser.read(ini_file)
            try:
                logo_fname = parser.get('Application', 'Logo')
            except NoOptionError:
                self.fatal('No Logo specified in ini file!')
            else:
                self.copy_file(args.logo, os.path.join(meta_inf_dir, logo_fname))
        else:
            self.copy_file(args.logo, os.path.join(meta_inf_dir, '%s.png' % component_id))
    if args.logo_detail_page:
        parser = ConfigParser()
        parser.read(ini_file)
        try:
            logo_detail_fname = parser.get('Application', 'LogoDetailPage')
        except NoOptionError:
            # NOTE(review): message looks copy-pasted from the Logo branch
            # (should mention LogoDetailPage), and unlike that branch there
            # is no `else:` — if fatal() returns, the copy_file below hits
            # an unbound logo_detail_fname.  Confirm fatal() always exits.
            self.fatal('No Logo specified in ini file!')
        self.copy_file(args.logo_detail_page, os.path.join(meta_inf_dir, logo_detail_fname))
    if args.screenshot:
        # First screenshot is the English one, optional second is German.
        self.copy_file(args.screenshot[0], os.path.join(meta_inf_dir, app_en.screenshot))
        if len(args.screenshot) > 1:
            self.copy_file(args.screenshot[1], os.path.join(meta_inf_dir, app_de.screenshot))
    if args.thumbnails:
        # Deduplicated union of local (non-http) thumbnail names from both
        # localisations, matched positionally against --thumbnails.
        thumbnails = []
        for thumbnail in app_en.thumbnails + app_de.thumbnails:
            if thumbnail in thumbnails:
                continue
            if thumbnail.startswith('http'):
                continue
            thumbnails.append(thumbnail)
        for i, thumbnail in enumerate(args.thumbnails):
            try:
                self.copy_file(thumbnail, os.path.join(meta_inf_dir, thumbnails[i]))
            except IndexError:
                raise LocalAppCenterError('The ini file must state as much Thumbnails= as --thumbnails are given')
    if args.readme:
        for readme in args.readme:
            self.copy_file(readme, repo_dir)
    if args.license:
        for license in args.license:
            self.copy_file(license, repo_dir)
    # Control files are copied to fixed, well-known names in repo_dir.
    if args.ucr:
        self.copy_file(args.ucr, os.path.join(repo_dir, 'univention-config-registry-variables'))
    if args.schema:
        self.copy_file(args.schema, os.path.join(repo_dir, 'schema'))
    if args.attributes:
        self.copy_file(args.attributes, os.path.join(repo_dir, 'attributes'))
    if args.configure:
        self.copy_file(args.configure, os.path.join(repo_dir, 'configure'))
    if args.configure_host:
        self.copy_file(args.configure_host, os.path.join(repo_dir, 'configure_host'))
    if args.settings:
        self.copy_file(args.settings, os.path.join(repo_dir, 'settings'))
    if args.preinst:
        self.copy_file(args.preinst, os.path.join(repo_dir, 'preinst'))
    if args.join:
        self.copy_file(args.join, os.path.join(repo_dir, 'inst'))
    if args.prerm:
        self.copy_file(args.prerm, os.path.join(repo_dir, 'prerm'))
    if args.unjoin:
        self.copy_file(args.unjoin, os.path.join(repo_dir, 'uinst'))
    if args.init:
        self.copy_file(args.init, os.path.join(repo_dir, 'init'))
    if args.setup:
        self.copy_file(args.setup, os.path.join(repo_dir, 'setup'))
    if args.store_data:
        self.copy_file(args.store_data, os.path.join(repo_dir, 'store_data'))
    if args.restore_data_before_setup:
        self.copy_file(args.restore_data_before_setup, os.path.join(repo_dir, 'restore_data_before_setup'))
    if args.restore_data_after_setup:
        self.copy_file(args.restore_data_after_setup, os.path.join(repo_dir, 'restore_data_after_setup'))
    if args.update_available:
        self.copy_file(args.update_available, os.path.join(repo_dir, 'update_available'))
    if args.update_packages:
        self.copy_file(args.update_packages, os.path.join(repo_dir, 'update_packages'))
    if args.update_release:
        self.copy_file(args.update_release, os.path.join(repo_dir, 'update_release'))
    if args.update_app_version:
        self.copy_file(args.update_app_version, os.path.join(repo_dir, 'update_app_version'))
    if args.env:
        self.copy_file(args.env, os.path.join(repo_dir, 'env'))
    return app_en
class AppcenterApp(object):
    """One app inside a (local) Univention App Center mirror.

    Knows where the app's meta-inf and repository files live on disk and
    what their download URLs are, and can enumerate all files that must be
    published for the app.
    """

    def __init__(self, name, id, ucs_version, meta_inf_dir, components_dir, server):
        self.name = name
        self.id = id
        self.ucs_version = ucs_version
        self.meta_inf_dir = meta_inf_dir
        self.app_dir = ''
        if os.path.exists(os.path.join(self.meta_inf_dir, self.id)):
            # since UCS 4.1, each app has a separate subdirectory
            self.app_dir = self.id
        self.components_dir = components_dir
        # Normalize: no trailing slash on the server base URL.
        if server.endswith('/'):
            server = server[:-1]
        self.server = server
        # Merge the .ini and .meta files into one configuration view.
        self.config = ConfigParser()
        self.config.read([self.get_ini_file(), self.get_meta_file()])

    def get_metainf_url(self):
        url = '%s/meta-inf/%s/' % (self.server, self.ucs_version)
        return url

    def get_repository_url(self):
        return '%s/univention-repository/%s/maintained/component/%s/' % (self.server, self.ucs_version, self.name)

    def _meta_url(self, filename, with_app_dir=True):
        # URL of a file below meta-inf/, optionally inside the app subdir.
        path = filename
        if with_app_dir:
            path = os.path.join(self.app_dir, filename)
        return urllib2.urlparse.urljoin(self.get_metainf_url(), path)

    def _repository_url(self, filename):
        return urllib2.urlparse.urljoin(self.get_repository_url(), filename)

    def _components_dir(self, filename):
        return os.path.join(self.components_dir, self.name, filename)

    def _meta_inf_dir(self, filename, with_app_dir=True):
        path = self.meta_inf_dir
        if with_app_dir:
            path = os.path.join(path, self.app_dir)
        return os.path.join(path, filename)

    # NOTE(review): the .meta file is named after self.id while the .ini
    # and .png files are named after self.name — confirm this asymmetry is
    # intended before unifying.
    def get_meta_file(self):
        return self._meta_inf_dir('%s.meta' % self.id)

    def get_meta_url(self):
        return self._meta_url('%s.meta' % self.id)

    def get_ini_file(self):
        return self._meta_inf_dir('%s.ini' % self.name)

    def get_ini_url(self):
        return self._meta_url('%s.ini' % self.name)

    def get_png_file(self):
        # since UCS 4.1 deprecated
        return self._meta_inf_dir('%s.png' % self.name)

    def get_png_url(self):
        # since UCS 4.1 deprecated
        return self._meta_url('%s.png' % self.name)

    def file_info(self, name, url, filename):
        return FileInfo(self, name, url, filename)

    def important_files(self):
        """Yield a FileInfo for every file that must be published."""
        # Adding "special ini and png file
        for special_file in ['ini', 'png', 'meta']:
            # Dispatch to get_ini_file/get_ini_url etc. by name.
            get_file_method = getattr(self, 'get_%s_file' % special_file.lower())
            get_url_method = getattr(self, 'get_%s_url' % special_file.lower())
            filename = get_file_method()
            url = get_url_method()
            if os.path.exists(filename):
                yield self.file_info(special_file, url, filename)
        # Adding logo files
        for ikey in ('Logo', 'LogoDetailPage'):
            if self.config.has_option('Application', ikey):
                basename = self.config.get('Application', ikey)
                filename = self._meta_inf_dir(basename)
                url = self._meta_url(basename)
                if os.path.isfile(filename):
                    yield self.file_info(ikey.lower(), url, filename)
        # Adding LICENSE_AGREEMENT and localised versions like LICENSE_AGREEMENT_DE
        for readme_filename in glob(self._components_dir('LICENSE_AGREEMENT*')):
            basename = os.path.basename(readme_filename)
            url = self._repository_url(basename)
            yield self.file_info(basename, url, readme_filename)
        # Adding README, README_UPDATE, README_INSTALL, REAME_POST_UPDATE, README_POST_INSTALL
        # and all the localised versions like README_DE and README_POST_INSTALL_EN (and even *_FR)
        for readme_filename in glob(self._components_dir('README*')):
            basename = os.path.basename(readme_filename)
            url = self._repository_url(basename)
            yield self.file_info(basename, url, readme_filename)
        # Adding ucr, schema, (un)joinscript, etc
        for ext in ['univention-config-registry-variables', 'schema',
                    'attributes', 'configure', 'configure_host', 'settings',
                    'preinst', 'inst', 'init', 'prerm', 'uinst', 'setup',
                    'store_data', 'restore_data_before_setup',
                    'restore_data_after_setup', 'update_available',
                    'update_packages', 'update_release',
                    'update_app_version', 'env']:
            control_filename = self._components_dir(ext)
            if os.path.exists(control_filename):
                basename = os.path.basename(control_filename)
                url = self._repository_url(basename)
                yield self.file_info(ext, url, control_filename)

    def tar_files(self):
        # (on-disk path, path inside the archive) for every published file.
        for file_info in self.important_files():
            yield file_info.filename, file_info.archive_filename

    def to_index(self):
        # Map each published file name to its URL and checksums.
        index = {}
        for file_info in self.important_files():
            index[file_info.name] = {'url': file_info.url, 'md5': file_info.md5, 'sha256': file_info.sha256}
        return index
from gi.repository import Notify
from gi.repository.Gio import File
from gi.repository.GdkPixbuf import Pixbuf
from mpd import MPDClient

# Prefer the per-user config file; fall back to the system-wide default.
configFile = os.path.expanduser("~/.config/musnify-mpd/musnify-mpd.config")
if not os.path.isfile(configFile):
    print("Loading default config")
    configFile = "/etc/musnify-mpd.config"

config = ConfigParser()
config.read(configFile)
# Connection settings: config file wins, then the MPD_HOST/MPD_PORT
# environment variables, then hard-coded defaults.
host = config.get("mpd", "host",
                  fallback=os.environ.get("MPD_HOST", "localhost"))
port = config.get("mpd", "port",
                  fallback=os.environ.get("MPD_PORT", 6600))
# Optional last.fm API key used for cover lookup.
if config.has_option("apiKey", "lastfm"):
    apiKey = config.get("apiKey", "lastfm")
musicLibrary = os.path.expanduser(
    config.get("mpd", "musiclibrary", fallback='~/Music')) + "/"
debug = False


class MPDWrapper:
    # Thin wrapper around MPDClient (class continues beyond this excerpt).
    def __init__(self, host="localhost", port="6600"):
        self.client = MPDClient()
        # Short network timeout; idle() is allowed to block indefinitely.
        self.client.timeout = 1
        self.client.idletimeout = None
def main():
    """Recorder daemon entry point: read the config file given on the
    command line, set up logging and tuners, then loop scheduling recording
    jobs until SIGTERM (exit) or SIGHUP (reload schedule)."""
    from apscheduler.schedulers.background import BackgroundScheduler
    if len(sys.argv) != 2:
        sys.exit("usage: %s <config-file>" % sys.argv[0])
    # Python 2/3 compatible ConfigParser import and construction
    # (inline_comment_prefixes only exists on Python 3).
    try:
        from ConfigParser import ConfigParser
    except ImportError:
        # python3
        from configparser import ConfigParser
    try:
        config = ConfigParser(inline_comment_prefixes=(';', ))
    except TypeError:
        # not python3
        config = ConfigParser()
    config.readfp(open(sys.argv[1]))
    global logfile
    logfile = config.get("global", "logfile")
    FORMAT = "%(asctime)-15s: %(message)s"
    logging.basicConfig(level=logging.INFO, filename=logfile, filemode='w',
                        format=FORMAT)
    # Set time on WDLXTV systems
    rdate = "/usr/sbin/rdate"
    if os.path.exists(rdate) and os.access(rdate, os.X_OK):
        cmd = [rdate, "ntp.internode.on.net"]
        subprocess.Popen(cmd).wait()
    logging.info(
        "Main process PID: %d, use this for sending SIGHUP "
        "for re-reading the schedule-file", os.getpid())
    # Globals shared with the signal handlers and job functions.
    global tuners
    tuners = TUNERS(config.get("global", "tuners"))
    global hdhomerun_config
    hdhomerun_config = config.get("global", "hdhomerun_config")
    schedule_file = config.get("global", "schedule_file")
    media_dir = config.get("global", "media_dir")
    # channelmap: option name -> list of channel identifiers.
    channelmap = {}
    for opt in config.options("channelmap"):
        channelmap[opt] = config.get("channelmap", opt).split(",")
    # Outer loop: rebuild the scheduler whenever SIGHUP requests a reload.
    while True:
        global reload_jobs, shutdown
        reload_jobs = False
        shutdown = False
        sched = BackgroundScheduler(daemon=False)
        sched.start()
        signal.signal(signal.SIGHUP, sighup_handler)
        signal.signal(signal.SIGTERM, sigterm_handler)
        schedule_jobs(sched, schedule_file, channelmap, media_dir)
        # Sleep until a signal flips one of the flags.
        while not (reload_jobs or shutdown):
            signal.pause()
        sched.shutdown()
        if shutdown:
            sys.exit(0)
""" Unregister an executor from the map. :param name: The name of the executor to unregister. :type name: str """ del _task_map[name] # Register the core executors that are always enabled. register_executor('python', python_run) register_executor('workflow', workflow_run) # Load plugins that are enabled in the config file _plugins = os.environ.get('ROMANESCO_PLUGINS_ENABLED', config.get('romanesco', 'plugins_enabled')) _plugins = [p.strip() for p in _plugins.split(',') if p.strip()] _paths = os.environ.get('ROMANESCO_PLUGIN_LOAD_PATH', config.get('romanesco', 'plugin_load_path')).split(':') _paths = [p for p in _paths if p.strip()] _paths.append(os.path.join(PACKAGE_DIR, 'plugins')) utils.load_plugins(_plugins, _paths) def load(task_file): """ Load a task JSON into memory, resolving any ``"script_uri"`` fields by replacing it with a ``"script"`` field containing the contents pointed to by ``"script_uri"`` (see :py:mod:`romanesco.uri` for URI formats). A ``script_fetch_mode`` field may also be set
def get(self, section, option, **kwargs):
    """Fetch an option via the parent parser; on Python 2, decode plain
    byte strings to unicode using UTF-8."""
    raw = PythonConfigParser.get(self, section, option, **kwargs)
    if PY2 and type(raw) is str:
        return raw.decode('utf-8')
    return raw
import os
import sys
sys.path.append('../')

from src.com.dis.client import disclient
from src.com.dis.models.base_model import IS_PYTHON2
# Pick the ConfigParser import matching the running interpreter.
if IS_PYTHON2:
    from ConfigParser import ConfigParser
else:
    from configparser import ConfigParser

# Credentials and endpoint are read from ../conf.ini, section [Section1].
fp = '../conf.ini'
conf = ConfigParser()
conf.read(fp)
# Use configuration file
try:
    projectid = conf.get('Section1', 'projectid')
    ak = conf.get('Section1', 'ak')
    sk = conf.get('Section1', 'sk')
    region = conf.get('Section1', 'region')
    endpoint = conf.get('Section1', 'endpoint')
except Exception as ex:
    # Config errors are only reported; the names stay unbound in that case.
    print(str(ex))

# projectid = "your projectid"
# endpoint = " "
# ak = "*** Provide your Access Key ***"
# sk = "*** Provide your Secret Key ***"
# region = " "

streamname = "dis-w_p"
startSeq = '0'
def loginToBRX(sensorId):
    """Establish a BRAPI session to the BRX server (Python 2 code).

    Reads the server name/IP/port from /etc/blackrain.conf, then retries
    up to ATTEMPTS times to establish a session, storing the client in the
    module-level `Brapi` global.  All failures are swallowed.
    """
    global Brapi
    global SensorId
    SHORT_SLEEP = 1  # 5
    LONG_SLEEP = 1  # 60 seconds
    ATTEMPTS = 1  # 2
    SensorId = sensorId
    try:
        # Read configuration parameters from Blackrain config file
        config = ConfigParser()
        config.read("/etc/blackrain.conf")
        brxName = config.get('brx', 'brxName')
        brxIP = config.get('brx', 'brxIP')
        brxPort = config.get('brx', 'brxPort')
        # Lookup IP using /etc/hosts file first (local override for testing)
        a = "BRX DNS name : " + brxName.__str__()
        logging.info(a)
        # print a
        a = "BRX IP : " + brxIP.__str__()
        logging.info(a)
        #print a
        a = "BRX port number : " + brxPort.__str__()
        logging.info(a)
        #print a
        for i in range(ATTEMPTS):  # 2 attempts
            # Establish a session to BRX, timing the call for the log.
            start = time.time()
            Brapi = BlackRainClient.BlackrainAPI(
                brxIP, brxPort, sensorId)  # hard-coded sensorID
            end = time.time()
            apiTime = end - start
            logging.info("BRAPI version v" + Brapi.get_version().__str__())
            #print "i=" + i.__str__() + " : establishing session to BlackRain Mothership @ " + brxIP + ":" + brxPort + "..."
            responseFromBrapi = Brapi.establish_session()
            if responseFromBrapi != BlackRainClient.Status.ACCESS_SUCCESS:
                a = "[+] Warning : loginToBRX() FAILED attempt = " + i.__str__(
                ) + ", response = " + responseFromBrapi.__str__(
                ) + " BRX_LOGIN_PERF : %.2f" % apiTime + " secs"
                #print a
                #logging.critical(a)
                #syslog.syslog(a)
                #print "Waiting..."
                if i == 0:
                    time.sleep(
                        SHORT_SLEEP)  # on first failure, use short wait
                else:
                    time.sleep(
                        LONG_SLEEP
                    )  # wait 60 seconds - probably a persistent failure now
                continue  # have another attempt
            else:
                a = "[+] loginToBRX() OK : response = " + responseFromBrapi.__str__(
                ) + " BRX_LOGIN_PERF : %.2f" % apiTime + " secs"
                #print a
                logging.info(a)
                #syslog.syslog(a)
                return
        # return
        # Retry schedule expired
        #syslog.syslog("BRX_LOGIN_ERROR : Failed to login to BRX")
        return
    # NOTE(review): Python 2 except syntax; also silently swallows every
    # exception, leaving Brapi unset for callers.
    except Exception, e:
        #logging.critical("Exception : kojoney_blackrain.loginToBRX() : " + e.__str__())
        return
from django.http import HttpResponseRedirect
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import EmailMessage
from ldapuser.models import *
from asset.models import *
from ldaplog.models import *

# All paths are resolved relative to the repository root (two levels up).
BASE_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
CONF = ConfigParser()
CONF.read(os.path.join(BASE_DIR, 'ldap.conf'))
LOG_DIR = os.path.join(BASE_DIR, 'logs')
SSH_KEY_DIR = os.path.join(BASE_DIR, 'keys')
SERVER_KEY_DIR = os.path.join(SSH_KEY_DIR, 'server')
# Settings pulled from ldap.conf sections [base], [ldap] and [mail].
KEY = CONF.get('base', 'key')
LOGIN_NAME = getpass.getuser()
LDAP_ENABLE = CONF.getint('ldap', 'ldap_enable')
SEND_IP = CONF.get('base', 'ip')
SEND_PORT = CONF.get('base', 'port')
MAIL_FROM = CONF.get('mail', 'email_host_user')


class LDAPMgmt():
    # LDAP administration helper (class continues beyond this excerpt).
    def __init__(self, host_url, base_dn, root_cn, root_pw):
        self.ldap_host = host_url
        self.ldap_base_dn = base_dn
        self.conn = ldap.initialize(host_url)
def parserconf(section, option, configfile='/etc/backup/backup.conf'):
    """Return a single option from the backup configuration file.

    section    -- config section name
    option     -- option name within the section
    configfile -- path to the INI file; defaults to the system-wide
                  /etc/backup/backup.conf (parameterized so callers and
                  tests can point at an alternate file)

    Raises configparser.NoSectionError / NoOptionError when missing.
    """
    config = ConfigParser()
    config.read(configfile)
    return config.get(section, option)
import os, cPickle, imp, zipfile, zlib, traceback, shutil
# NOTE(review): Queue imported here is immediately shadowed by
# multiprocessing.Queue on the next line; only Empty remains useful.
from Queue import Empty, Queue
from multiprocessing import Process, Queue, cpu_count, freeze_support, Lock
from datetime import datetime
from ConfigParser import ConfigParser
from binascii import unhexlify as unhex, hexlify
from Crypto.Cipher import DES3

freeze_support()
startTime = datetime.now()  #time this cpu hog

#Get path to Eve installation from evedec.ini file
config = ConfigParser()
config.read('evedec.ini')
eve_path = config.get('main', 'eve_path')

#use version info from eve's common.ini to create directory name
eveconfig = ConfigParser()
eveconfig.read(os.path.join(eve_path, 'start.ini'))
# Output directory: <store_path>/eve-<version>.<build>
store_path = os.path.join(config.get('main', 'store_path'), \
    'eve-%s.%s' % (eveconfig.get('main', 'version'),
                   eveconfig.get('main', 'build')))
store_path = os.path.abspath(store_path)

#search blue.dll for keyblob header
#yeah, it's really that easy
#dest = os.path.join(store_path,'__binaries')
#destination = shutil.copytree(eve_path, dest)
class PlatformConfig:
    """Read-only accessor for the [platform] section of the platform
    config file found in *config_dir*.

    Every getter simply returns the string value of the like-named option;
    missing options raise configparser errors at call time.
    """

    def __init__(self, config_dir):
        self.parser = ConfigParser()
        self.filename = join(config_dir, PLATFORM_CONFIG_NAME)
        # Fail fast if the config file is missing.
        if (not isfile(self.filename)):
            raise Exception('platform config does not exist: {0}'.format(self.filename))
        self.parser.read(self.filename)

    # --- directory layout ---------------------------------------------
    def apps_root(self):
        return self.__get('apps_root')

    def data_root(self):
        return self.__get('data_root')

    def configs_root(self):
        return self.__get('configs_root')

    def config_root(self):
        return self.__get('config_root')

    def www_root_internal(self):
        return self.__get('www_root_internal')

    def www_root_public(self):
        return self.__get('www_root_public')

    def app_dir(self):
        return self.__get('app_dir')

    def data_dir(self):
        return self.__get('data_dir')

    def config_dir(self):
        return self.__get('config_dir')

    def bin_dir(self):
        return self.__get('bin_dir')

    def nginx_config_dir(self):
        return self.__get('nginx_config_dir')

    # --- external tools and cron --------------------------------------
    def cron_user(self):
        return self.__get('cron_user')

    def cron_cmd(self):
        return self.__get('cron_cmd')

    def openssl(self):
        return self.__get('openssl')

    def nginx(self):
        return self.__get('nginx')

    def cron_schedule(self):
        return self.__get('cron_schedule')

    # --- logging ------------------------------------------------------
    def get_log_root(self):
        return self.__get('log_root')

    def get_log_sender_pattern(self):
        return self.__get('log_sender_pattern')

    # --- disks --------------------------------------------------------
    def get_internal_disk_dir(self):
        return self.__get('internal_disk_dir')

    def get_external_disk_dir(self):
        return self.__get('external_disk_dir')

    def get_disk_link(self):
        return self.__get('disk_link')

    def get_disk_root(self):
        return self.__get('disk_root')

    def get_ssh_port(self):
        return self.__get('ssh_port')

    def get_rest_internal_log(self):
        return self.__get('rest_internal_log')

    def get_rest_public_log(self):
        return self.__get('rest_public_log')

    # --- TLS material -------------------------------------------------
    def get_ssl_certificate_file(self):
        return self.__get('ssl_certificate_file')

    def get_ssl_ca_certificate_file(self):
        return self.__get('ssl_ca_certificate_file')

    def get_ssl_ca_serial_file(self):
        return self.__get('ssl_ca_serial_file')

    def get_ssl_certificate_request_file(self):
        return self.__get('ssl_certificate_request_file')

    def get_default_ssl_certificate_file(self):
        return self.__get('default_ssl_certificate_file')

    def get_ssl_key_file(self):
        return self.__get('ssl_key_file')

    def get_ssl_ca_key_file(self):
        return self.__get('ssl_ca_key_file')

    def get_default_ssl_key_file(self):
        return self.__get('default_ssl_key_file')

    def get_openssl_config(self):
        return self.__get('openssl_config')

    # --- misc ---------------------------------------------------------
    def get_platform_log(self):
        return self.__get('platform_log')

    def get_hooks_root(self):
        return self.__get('hooks_root')

    def is_certbot_test_cert(self):
        # Only boolean-typed option; parsed with getboolean.
        return self.parser.getboolean('platform', 'certbot_test_cert')

    def get_boot_extend_script(self):
        return self.__get('boot_extend_script')

    def get_disk_format_script(self):
        return self.__get('disk_format_script')

    def get_snapd_upgrade_script(self):
        return self.__get('snapd_upgrade_script')

    def get_channel(self):
        return self.__get('channel')

    def __get(self, key):
        # Single point of access into the [platform] section.
        return self.parser.get('platform', key)
# Written By: Steven McGrath # Verison: Build 042 # Date: 05/01/2012 import sccsv import securitycenter import json import os from ConfigParser import ConfigParser conf = ConfigParser() conf.read('csv_gen.conf') sccsv.debug.DEBUG = conf.getboolean('Settings', 'debug') sc = SecurityCenter4(conf.get('Settings', 'address') port=conf.getint('Settings', 'port')) sc.login(conf.get('Settings', 'username'), conf.get('Settings', 'password')) def build_and_email(section): # The first thing that we need to do is get all of the email configuration # stuff loaded up. This will involve some minor parsing and in some cases # we will need to check to see if there is a local variable set to override # the global one that is set in the Settings stanza. email_to = conf.get(section, 'email_to').split(',') email_from = conf.get('Settings', 'email_from') email_host = conf.get('Settings', 'smtp_host') if conf.has_option(section, 'email_msg'):
# Building a source distribution: refresh release notes and remove any
# stale dist directory for this version.
if 'sdist' in sys.argv:
    if os.path.exists(P4_RELNOTES):
        deleteReleaseNotes()
    copyReleaseNotes()
    distdir = global_dist_directory + VersionInfo(".").getDistVersion()
    if os.path.exists(distdir):
        shutil.rmtree(distdir, False, force_remove_file)

p4_api_dir = get_api_dir()

# Determine the SSL directory: an explicit --ssl [DIR] argument wins,
# otherwise fall back to the setup config file.
# NOTE(review): the local name `ssl` shadows the stdlib ssl module.
ssl = None
if '--ssl' in sys.argv:
    index = sys.argv.index("--ssl")
    if index < len(sys.argv) - 1:
        # --ssl followed by a directory; consume both argv entries.
        ssl = sys.argv[index + 1]
        del sys.argv[index:index+2]
    else:
        # Bare --ssl at the end of the command line.
        ssl = ""
        del sys.argv[index:index+1]
else:
    config = ConfigParser()
    config.read(P4_CONFIG_FILE)
    if config.has_section(P4_CONFIG_SECTION):
        if config.has_option(P4_CONFIG_SECTION, P4_CONFIG_SSLDIR):
            ssl = config.get(P4_CONFIG_SECTION, P4_CONFIG_SSLDIR)

do_setup(p4_api_dir, ssl)
def get_auto_mount():
    """Return the 'auto_mount' value from the [mount_media] section of CFG_FILE."""
    auto_mount_parser = ConfigParser()
    with open(CFG_FILE) as cfg_handle:
        auto_mount_parser.readfp(cfg_handle)
    return auto_mount_parser.get("mount_media", "auto_mount")
import os
from ConfigParser import ConfigParser
from boto.mturk.connection import MTurkConnection

# Locate config.txt in the framework root (the parent of this file's
# directory) and read the AWS credentials from it.
framework_root = os.path.abspath(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), os.path.pardir))
configfilename = os.path.join(framework_root, 'config.txt')
config = ConfigParser()
config.read( configfilename )

# Connect to the Mechanical Turk endpoint with the [AWS Access] credentials.
host = 'mechanicalturk.amazonaws.com'
mturkparams = dict(
    aws_access_key_id = config.get( 'AWS Access', 'aws_access_key_id' ),
    aws_secret_access_key = config.get( 'AWS Access', 'aws_secret_access_key' ),
    host=host)
mtc = MTurkConnection( **mturkparams )

# Now let's get a list of all the assignment objects
# NOTE(review): page_size/newhits and the commented loop look like leftovers
# from a paginated-fetch attempt; hitpages is immediately reassigned below,
# so the [] initialisation is dead — confirm before cleaning up.
page_size = 50
hitpages = []
newhits = True
#while newhits:
#    newhits = mtc.get_all_hits()
#    hitpages.append(newhits)
#    print dir(newhits)

# Fetch every HIT plus the first 100 reviewable HITs into one flat list.
hitpages = list( mtc.get_all_hits() )
hitpages.extend( list( mtc.get_reviewable_hits(page_size=100) ) )
def main():
    """Render virtuoso.ini, odbc.ini and supervisord.conf from their .tmpl files.

    Reads the ini file named by the first CLI argument, builds a dict of
    substitution variables (celery brokers and worker counts, supervisor
    autostart flags, and virtuoso paths when the app's sqlalchemy.url uses
    virtuoso), then renders each '<name>.tmpl' template into '<name>' via
    %-interpolation.  Exits with status 1 when no CONFIG_URI is given.
    """
    if len(sys.argv) < 2:
        sys.stderr.write('Usage: %s CONFIG_URI\n' % sys.argv[0])
        sys.exit(1)
    config_uri = sys.argv.pop(1)
    # The defaults allow to set sensible values and not edit all the local.ini
    # files. Err towards conservative production values.
    # If something SHOULD be defined, assert its presence later on.
    # TODO: Subclass ConfigParser to give a warning whenever a value
    # is taken from the defaults.
    config = ConfigParser(defaults={
        # Define either the first or all others.
        'celery_tasks.broker': '',
        'celery_tasks.imap.broker': '',
        'celery_tasks.notification_dispatch.broker': '',
        'celery_tasks.notify.broker': '',
        'celery_tasks.translate.broker': '',
        # num_workers: These are production values
        'celery_tasks.imap.num_workers': '1',
        'celery_tasks.notification_dispatch.num_workers': '1',
        'celery_tasks.notify.num_workers': '2',
        'celery_tasks.translate.num_workers': '2',
        # Sensible defaults
        'autostart_virtuoso': 'false',
        'autostart_celery_imap': 'false',
        'autostart_celery_notification_dispatch': 'true',
        'autostart_celery_notify': 'true',
        'autostart_celery_notify_beat': 'true',
        'autostart_celery_translate': 'false',
        'autostart_source_reader': 'true',
        'autostart_changes_router': 'true',
        'autostart_pserve': 'false',
        'autostart_nodesass': 'false',
        'autostart_gulp': 'false',
        'autostart_uwsgi': 'false',
        'autostart_metrics_server': 'false',
        'autostart_edgesense_server': 'false',
    })
    config.read(config_uri)
    using_virtuoso = 'virtuoso' in config.get('app:assembl', 'sqlalchemy.url')
    # Optional [metrics] section: only enable the metrics server when both
    # the code directory and the client executable actually exist.
    try:
        metrics_code_dir = config.get('metrics', 'metrics_code_dir')
        metrics_cl = config.get('metrics', 'metrics_cl')
        has_metrics_server = (
            metrics_code_dir and exists(metrics_code_dir)
            and exists(metrics_cl.split()[0]))
    except NoSectionError:
        has_metrics_server = False
        metrics_cl = '/bin/ls'  # innocuous
        metrics_code_dir = ''
    # Optional [edgesense] section, same existence checks.
    try:
        edgesense_code_dir = config.get('edgesense', 'edgesense_code_dir')
        edgesense_venv = config.get('edgesense', 'venv')
        has_edgesense_server = (
            edgesense_code_dir and exists(edgesense_code_dir)
            and exists(join(edgesense_venv, 'bin',
                            'edgesense_catalyst_server')))
    except NoSectionError:
        has_edgesense_server = False
        edgesense_venv = '/tmp'  # innocuous
        edgesense_code_dir = ''
    # Per-task brokers fall back to the shared broker; at least one of the
    # two levels must be configured (defaults are all '').
    default_celery_broker = config.get(SECTION, 'celery_tasks.broker')
    imap_celery_broker = config.get(
        SECTION, 'celery_tasks.imap.broker') or default_celery_broker
    notif_dispatch_celery_broker = config.get(
        SECTION,
        'celery_tasks.notification_dispatch.broker') or default_celery_broker
    notify_celery_broker = config.get(
        SECTION, 'celery_tasks.notify.broker') or default_celery_broker
    translate_celery_broker = config.get(
        SECTION, 'celery_tasks.translate.broker') or default_celery_broker
    assert all((imap_celery_broker, notif_dispatch_celery_broker,
                notify_celery_broker,
                translate_celery_broker)), "Define the celery broker"
    # Template substitution variables shared by all three templates.
    vars = {
        'IMAP_CELERY_BROKER': imap_celery_broker,
        'NOTIF_DISPATCH_CELERY_BROKER': notif_dispatch_celery_broker,
        'NOTIFY_CELERY_BROKER': notify_celery_broker,
        'TRANSLATE_CELERY_BROKER': translate_celery_broker,
        'IMAP_CELERY_NUM_WORKERS': config.get(
            SECTION, 'celery_tasks.imap.num_workers'),
        'NOTIF_DISPATCH_CELERY_NUM_WORKERS': config.get(
            SECTION, 'celery_tasks.notification_dispatch.num_workers'),
        'NOTIFY_CELERY_NUM_WORKERS': config.get(
            SECTION, 'celery_tasks.notify.num_workers'),
        'TRANSLATE_CELERY_NUM_WORKERS': config.get(
            SECTION, 'celery_tasks.translate.num_workers'),
        'here': dirname(abspath('supervisord.conf')),
        'CONFIG_FILE': config_uri,
        # Virtuoso only autostarts when the app actually uses it.
        'autostart_virtuoso': using_virtuoso and config.get(
            'supervisor', 'autostart_virtuoso'),
        'autostart_celery_imap': config.get(
            'supervisor', 'autostart_celery_imap'),
        'autostart_celery_notification_dispatch': config.get(
            'supervisor', 'autostart_celery_notification_dispatch'),
        'autostart_celery_notify': config.get(
            'supervisor', 'autostart_celery_notify'),
        'autostart_celery_notify_beat': config.get(
            'supervisor', 'autostart_celery_notify_beat'),
        'autostart_celery_translate': config.get(
            'supervisor', 'autostart_celery_translate'),
        'autostart_source_reader': config.get(
            'supervisor', 'autostart_source_reader'),
        'autostart_changes_router': config.get(
            'supervisor', 'autostart_changes_router'),
        'autostart_pserve': config.get('supervisor', 'autostart_pserve'),
        'autostart_nodesass': config.get('supervisor', 'autostart_nodesass'),
        'autostart_gulp': config.get('supervisor', 'autostart_gulp'),
        'autostart_uwsgi': config.get('supervisor', 'autostart_uwsgi'),
        'autostart_metrics_server': (config.get(
            'supervisor', 'autostart_metrics_server')
            if has_metrics_server else 'false'),
        'metrics_code_dir': metrics_code_dir,
        'metrics_cl': metrics_cl,
        'autostart_edgesense_server': (config.get(
            'supervisor', 'autostart_edgesense_server')
            if has_edgesense_server else 'false'),
        'edgesense_venv': edgesense_venv,
        'VIRTUAL_ENV': os.environ['VIRTUAL_ENV'],
        'edgesense_code_dir': edgesense_code_dir,
    }
    if using_virtuoso:
        # Resolve the virtuoso installation root and sanity-check its layout
        # (binary, var dir, ODBC driver, vad share dir, VSP dir).
        vroot = config.get('virtuoso', 'virtuoso_root')
        if vroot == 'system':
            # Magic value: use the platform's conventional install prefix.
            if system().startswith('Darwin'):
                vroot = '/usr/local/virtuoso-opensource'
            else:
                vroot = '/usr'
        elif not vroot[0] == '/':
            # Relative path: resolve against the repository root.
            vroot = join(dirname(dirname(dirname(__file__))), vroot)
        assert exists(vroot), "virtuoso_root directory does not exist"
        assert exists(join(vroot, 'bin', 'virtuoso-t')),\
            "virtuoso_root directory does not contain bin/virtuoso-t"
        assert exists('var/db/virtuoso.ini.tmpl'),\
            "Please run this script from the assembl root."
        vroot_var = join(vroot, 'var')
        if not exists(vroot_var):
            vroot_var = '/var'
        vroot_lib = join(vroot, 'lib')
        assert exists(vroot_lib)
        # Some distributions put the ODBC driver in lib/odbc.
        if not exists(join(vroot_lib, 'virtodbcu.so'))\
                and exists(join(vroot_lib, 'odbc', 'virtodbcu.so')):
            vroot_lib = join(vroot_lib, 'odbc')
        vname = 'virtuoso'
        if not exists(join(vroot, 'share', vname)):
            # The share dir may carry a versioned name; find the one
            # containing a 'vad' subdirectory.
            names = listdir(join(vroot, 'share'))
            names = [n for n in names
                     if exists(join(vroot, 'share', n, 'vad'))]
            assert len(names) == 1, "Cannot identify the vad directory"
            vname = names[0]
        assert exists(join(vroot_var, 'lib', vname, 'vsp')),\
            "Cannot identify the VSP directory"
        vars.update({
            'VIRTUOSO_SERVER_PORT': config.getint('virtuoso', 'http_port'),
            'VIRTUOSO_HOSTNAME': config.get(SECTION, 'public_hostname'),
            'VIRTUOSO_PORT': config.getint('virtuoso', 'port'),
            'VIRTUOSO_ROOT': vroot,
            'VIRTUOSO_ROOT_VAR': vroot_var,
            'VIRTUOSO_ROOT_LIB': vroot_lib,
            'VIRTUOSO_SUBDIR_NAME': vname,
        })
    else:
        # dummy values: templates still need every key to interpolate.
        vars.update({
            'VIRTUOSO_SERVER_PORT': 8890,
            'VIRTUOSO_HOSTNAME': 'localhost',
            'VIRTUOSO_PORT': 1111,
            'VIRTUOSO_ROOT': '/usr',
            'VIRTUOSO_ROOT_VAR': '/usr/var',
            'VIRTUOSO_ROOT_LIB': '/usr/var/lib',
            'VIRTUOSO_SUBDIR_NAME': '/usr/share/virtuoso/vad',
        })
    print vars
    # Render each template with %-interpolation of the vars dict.
    for fname in ('var/db/virtuoso.ini', 'odbc.ini', 'supervisord.conf',):
        print fname
        tmpl = open(fname + '.tmpl').read()
        inifile = open(fname, 'w')
        inifile.write(tmpl % vars)
        inifile.close()
# Load the bot configuration: first try the file named on the command line,
# then fall back to gamebot.ini; if both fail, print usage and exit(1).
config = ConfigParser()
try:
    config.readfp(open(argv[1]))
except:
    try:
        config.readfp(open('gamebot.ini'))
    except:
        print "Syntax:"
        print " %s [config]" % argv[0]
        print ""
        print "If no configuration file is specified or there was an error, it will default to `gamebot.ini'."
        print "If there was a failure reading the configuration, it will display this message."
        exit(1)

# read config
SERVER = config.get('gamebot', 'server')
try:
    PORT = config.getint('gamebot', 'port')
except:
    # Missing or non-numeric port falls back to the module default.
    PORT = DEFAULT_PORT
NICK = config.get('gamebot', 'nick')
CHANNEL = config.get('gamebot', 'channel')

# Version string (presumably reported by the bot — confirm); embed the
# Mercurial revision when "hg id" succeeds, otherwise label it 'unknown'.
VERSION = 'gamebot hg:%s; http://hg.micolous.id.au/ircbots/'
try:
    VERSION = VERSION % Popen(["hg","id"], stdout=PIPE).communicate()[0].strip()
except:
    VERSION = VERSION % 'unknown'
del Popen, PIPE  # only needed for the one-shot version probe above

# NickServ password is optional; None means "do not identify".
try:
    NICKSERV_PASS = config.get('gamebot', 'nickserv_pass')
except:
    NICKSERV_PASS = None


def update(irc):
    # Periodic callback: remind the channel that everyone just lost the game.
    irc.action(CHANNEL, 'would like to inform you that you all just lost the game.')
def __init__(self):
    """Initialise game state from settings/settings.ini.

    Reads difficulty and the room/goal target from the [Game] section,
    and the default player name from [PlayerDefaults].
    """
    settings = ConfigParser()
    settings.read('settings/settings.ini')
    self.difficulty = settings.get("Game", "difficulty")
    self.name = settings.get("PlayerDefaults", "name")
    self.goal = settings.get("Game", "rooms")
def run():
    """Drive the Fermi Tools pipeline for one source described in config.ini.

    Reads the [params] section of config.ini, then runs event selection
    (gtselect), GTI filtering (gtmktime), counts map and counts cube
    binning (gtbin) through a FermiObject, the livetime cube (gtltcube via
    my_apps.expCube) and the exposure map (gtexpcube2), writing
    <Name>_*.fits intermediates along the way.
    """
    print "This is makeFermiFiles."
    parser = ConfigParser()
    parser.read('config.ini')

    # parameters from config file
    Name = str(parser.get('params','Name'))
    RA = float(parser.get('params','RA'))
    DEC = float(parser.get('params','DEC'))
    minEnergy = float(parser.get('params','minEnergy'))
    maxEnergy = float(parser.get('params','maxEnergy'))
    SCFile = str(parser.get('params','SCFile'))
    radius = float(parser.get('params','radius'))
    binsz = float(parser.get('params','binsz'))
    TSTART = float(parser.get('params','TSTART'))
    TSTOP = float(parser.get('params','TSTOP'))
    Infile = str(parser.get('params','Infile'))
    # NOTE(review): bins is only referenced by the commented-out
    # gtltcube_mp call below — confirm before removing.
    bins = int(parser.get('params','bins'))
    zmax = float(parser.get('params','zmax'))
    evclass = int(parser.get('params','evclass'))
    evtype = int(parser.get('params','evtype'))

    f = FermiObject()

    # Map the event type to its instrument response function name.
    # NOTE(review): irf is computed but never used in this function (the
    # exposure-map IRF is hard-coded to P8R2_SOURCE_V6 below), and it stays
    # undefined for any evtype other than 512/128/256/1024 — confirm intent.
    if evtype == 512:
        irf = "P8R2_ULTRACLEAN_V6"
    elif evtype == 128:
        irf = "P8R2_SOURCE_V6"
    elif evtype == 256:
        irf = "P8R2_CLEAN_V6"
    elif evtype == 1024:
        irf = "P8R2_ULTRACLEANVETO_V6"

    """
    Following steps execute Fermi Tool gtselect
    """
    print('\nWorking on file.')
    print('Cutting file to fit desired parameters . . .\n')
    f._setEvclass(evclass)
    f._setEvtype(evtype)
    f._setRa(RA)
    f._setDec(DEC)
    f._setRad(radius)
    f._setEmin(minEnergy)
    f._setEmax(maxEnergy)
    f._setZmax(zmax)
    f._setTmin(TSTART)
    f._setTmax(TSTOP)
    f._setInfile(Infile)
    f._setOutfile( Name + '_gtselect.fits')
    f.amonSelect()
    print('File cuts have been made. Now making cuts for GTI using spacecraft file.')

    """
    Following steps execute Fermi Tool gtmktime
    """
    f._setScfile(SCFile)
    f._setRoicut('no')
    f._setEvfile( Name + '_gtselect.fits')
    f._setOutfile( Name + '_gtmktime.fits')
    ###############################################
    # Filter expression
    Filter = '(DATA_QUAL>0)&&(LAT_CONFIG==1)'
    ###############################################
    f._setFilter(Filter)
    print('Working on file ' + str(f.getOutfile()) + '. . .')
    f.amonTime()
    print('File cuts have been made. Now begining construction of the counts map from event data.')

    """
    Following steps execute Fermi Tool gtbin to create the counts map
    """
    f._setAlgorithm('CMAP')
    f._setEvfile( Name + '_gtmktime.fits')
    f._setOutfile( Name + '_cmap.fits')
    f._setScfile('NONE')
    # Square map covering the full selection radius at binsz deg/pixel.
    num_pix = int((2*radius)/float(binsz))
    print "Counts map is " + str(num_pix) + " by " + str(num_pix) + " pixels."
    f._setNxpix(num_pix)
    f._setNypix(num_pix)
    f._setBinsz(binsz)
    f._setCoordsys('CEL')
    f._setAxisrot(0)
    f._setProj('AIT')
    f.amonBincmap()
    print('Counts map is complete. Now begining construction of the counts cube.')

    """
    Following steps execute Fermi Tool gtbin to create counts cube (3D counts map).
    """
    f._setAlgorithm('CCUBE')
    f._setOutfile( Name + '_ccube.fits')
    # The cube is inscribed in the selection circle: side = sqrt(2)*radius.
    pix = int((sqrt(2)*radius)/float(binsz))
    print "Counts cube is " + str(pix) + " by " + str(pix) + " pixels."
    f._setNxpix(pix)
    f._setNypix(pix)
    # 10 logarithmic energy bins per decade of the selected energy range.
    ebin = int(10*log10(maxEnergy/minEnergy))
    print "There are " + str(ebin) + " logarithmically uniform energy bins."
    f._setEnumbins(ebin)
    f.amonBinccube()
    print('Counts cube is complete.\nMoving on to multiprocessing version of gtltcube.')

    #You will want to run gtltcube_mp.py intead of the standard
    #Fermi Tool gtltcube as this script enables multiprocessing
    #capabilities that greatly increases the speed.
    #print "Now working on ltcube file using gtltcube_mp.py.\n"
    #gtltcube_mp(bins, SCFile, Name + '_gtmktime.fits', Name + '_ltcube.fits', False, zmax)
    #print "\nltcube complete.\nMoving to compute exposure map with gtexpcube2.\n"

    # Livetime cube via the standard gtltcube wrapper.
    my_apps.expCube['evfile'] = Name + '_gtmktime.fits'
    my_apps.expCube['scfile'] = SCFile
    my_apps.expCube['outfile'] = Name + '_ltcube.fits'
    my_apps.expCube['dcostheta'] = 0.025
    my_apps.expCube['binsz'] = 1
    my_apps.expCube['phibins'] = 0
    my_apps.expCube['zmax'] = zmax
    my_apps.expCube['chatter'] = 0
    my_apps.expCube.run()

    # Exposure map: padded 10 degrees beyond the selection radius per side.
    f._setInfile(Name + '_ltcube.fits')
    cubePix = int((2*radius + 20)/float(binsz))
    print "Exposure map is " + str(cubePix) + " by " + str(cubePix) + " pixels."
    f._setNxpix(cubePix)
    f._setNypix(cubePix)
    f._setBinsz(binsz)
    f._setCoordsys('CEL')
    f._setRa(RA)
    f._setDec(DEC)
    f._setAxisrot(0)
    f._setProj('AIT')
    f._setEmin(minEnergy)
    f._setEmax(maxEnergy)
    f._setEnumbins(ebin)
    f._setOutfile( Name + '_expcube.fits')
    f._setIrfs("P8R2_SOURCE_V6")
    f.amonExpcube2()
    print "Finished making exposure map.\nProgram complete.\n"
from netcontrolldap import netcontrolldap from pprint import pprint from ConfigParser import ConfigParser cfg = ConfigParser() cfg.read('/etc/ldap/netcontrol') server = cfg.get('base', 'server') bindDN = cfg.get('base', 'bindDN') adminPW = cfg.get('base', 'adminPW') del cfg o = netcontrolldap.LDAPConnection(server, ssl=True, admPasswd=adminPW, baseDN=bindDN) print o.delUser('jeiks', removeHome=True) o.closeConnection()
_fortran_indexing = fortran_indexing


def get_fortran_indexing():
    """
    Return the current ``fortran_indexing`` global setting.

    ``True`` for 1-based indexing; ``False`` for 0-based indexing.
    """
    global _fortran_indexing
    return _fortran_indexing


# Apply [general] settings from the config file, if present.
if 'general' in cfg.sections():
    if 'fortran_indexing' in cfg.options('general'):
        # BUG FIX: the original called bool(cfg.get(...)), but bool() of any
        # non-empty string is True, so a config value of "false" or "0" could
        # never disable fortran indexing.  getboolean() parses the string
        # properly ('1'/'yes'/'true'/'on' -> True, '0'/'no'/'false'/'off'
        # -> False).
        set_fortran_indexing(cfg.getboolean('general', 'fortran_indexing'))

# Set the root logger level from [logging] (e.g. level = DEBUG).
if 'logging' in cfg.sections():
    if 'level' in cfg.options('logging'):
        logging.root.setLevel(getattr(logging, cfg.get('logging', 'level')))

# Modules listed in [modules] with a zero integer value are disabled.
disabled_modules = []
if 'modules' in cfg.sections():
    for name, value in cfg.items('modules'):
        if not int(value):
            disabled_modules.append(name)

# External dependencies
available_modules = []
unavailable_modules = []
self.proxy_socket.close() if __name__ == '__main__': print len(sys.argv) if len(sys.argv) == 1: print "Use with cfg file in cmd line" sys.exit(1) parser = ConfigParser() #parser.read("coder0.cfg") parser.read(sys.argv[1]) mode = int(parser.get('default', 'mode')) coder = int(parser.get('default', 'coder')) port = int(parser.get('default', 'port')) isfpga = int(parser.get('default', 'isFpga')); packetsize = int(parser.get('default', 'packetSize')); portCtrl = int(parser.get('default', 'portCtrl')) portDest = int(parser.get('default', 'portDest')) ip = parser.get('default', 'ip').replace('"', '') manager = KeyManager(packet_size=packetsize) server = Proxy(listen_address=('0.0.0.0', port), target_address=(ip, portDest), codec=Codec(key_manager=manager, first_slot=coder == 1, is_fpga=isfpga == 1), work_mode=mode, is_coder=coder == 1)
import os
try:
    from ConfigParser import ConfigParser
except ImportError:
    from configparser import ConfigParser

# Config: prefer $RUGMI_CONF (defaulting to ~/.rugmi.conf), with
# /etc/rugmi.conf as the system-wide fallback.
config = ConfigParser()
config.read([
    os.path.expanduser(os.environ.get('RUGMI_CONF', '~/.rugmi.conf')),
    "/etc/rugmi.conf",
])

# get config
keys = [key.strip() for key in config.get("server", "keys").split(",")]
url = config.get("server", "url").rstrip("/").encode("utf8")
store_path = config.get("server", "store_path").rstrip("/").encode("utf8")
debug = config.getboolean("server", "debug")
def load_test_properties(test):
    """Populate *test* with settings read from test.properties.

    Looks for test.properties in the working directory first, then one
    level up, and assigns admin, domain and auth values as attributes on
    the given test object.
    """
    props = ConfigParser()
    # read() returns the list of files successfully parsed; fall back to
    # the parent directory when nothing was found here.
    if not props.read("test.properties"):
        props.read("../test.properties")

    # Admin console credentials.
    test.admin_hostname = props.get(pconstant.ADMIN, pconstant.HOST)
    test.admin_account_name = props.get(pconstant.ADMIN, pconstant.USERNAME)
    test.admin_password = props.get(pconstant.ADMIN, pconstant.PASSWORD)

    # Target domain.
    test.domain = props.get(pconstant.DOMAIN, pconstant.NAME)
    test.hostname = props.get(pconstant.DOMAIN, pconstant.HOST)
    test.domain_key = props.get(pconstant.DOMAIN, pconstant.KEY)

    # End-user account and session details.
    test.username = props.get(pconstant.AUTH, pconstant.USERNAME)
    test.account_name = '%s@%s' % (test.username, test.domain)
    test.account_id = props.get(pconstant.AUTH, pconstant.ID)
    test.password = props.get(pconstant.AUTH, pconstant.PASSWORD)
    test.token = props.get(pconstant.AUTH, pconstant.TOKEN)
    test.session_id = props.get(pconstant.AUTH, pconstant.SESSION_ID)