class Command:
    """CLI command: stream tweets matching the configured filters into a
    Redis-backed worker, optionally running as a daemon."""

    def __init__(self, options):
        """
        :param options: parsed CLI options; must expose ``config_path``
            and ``daemonize``.
        """
        self.options = options
        self.config = ConfigParser()
        # Close the config file when done instead of leaking the handle.
        with open(self.options.config_path) as config_file:
            self.config.readfp(config_file)

    def execute(self):
        """Connect the Twitter client and feed every status to the worker."""
        track = self.config.get('twitter', 'track').split(',')
        follow = self.config.get('twitter', 'follow').split(',')
        worker = Worker(self.config.get('redis', 'host'),
                        int(self.config.get('redis', 'port')))
        client = Client(self.config.get('twitter', 'username'),
                        self.config.get('twitter', 'password'),
                        track, follow)
        client.add_callback(worker.perform)
        client.connect()
        client.start()

    def run(self):
        """Run execute(), optionally daemonized behind a pid lockfile."""
        if self.options.daemonize:
            context = daemon.DaemonContext(
                pidfile=lockfile.FileLock(self.config.get('prize', 'pid_path'))
            )
            with context:
                self.execute()
        else:
            self.execute()
def test_yaml_snippet(self):
    """Render the YAML fixture and compare it against the expected XML
    fixture (doctest-style matching with ellipsis/whitespace tolerance).

    Skips silently when either fixture filename is unset.
    """
    if not self.xml_filename or not self.yaml_filename:
        return
    xml_filepath = os.path.join(self.fixtures_path, self.xml_filename)
    # Close the fixture files when done instead of leaking the handles.
    with open(xml_filepath, 'r') as xml_file:
        expected_xml = u"%s" % xml_file.read()
    yaml_filepath = os.path.join(self.fixtures_path, self.yaml_filename)
    if self.conf_filename:
        config = ConfigParser()
        conf_filepath = os.path.join(self.fixtures_path, self.conf_filename)
        with open(conf_filepath) as conf_file:
            config.readfp(conf_file)
    else:
        config = None
    parser = YamlParser(config)
    parser.parse(yaml_filepath)
    # Generate the XML tree
    parser.generateXML()
    parser.jobs.sort(key=operator.attrgetter('name'))
    # Prettify generated XML
    pretty_xml = "\n".join(job.output() for job in parser.jobs)
    self.assertThat(
        pretty_xml,
        testtools.matchers.DocTestMatches(expected_xml,
                                          doctest.ELLIPSIS |
                                          doctest.NORMALIZE_WHITESPACE |
                                          doctest.REPORT_NDIFF))
def __init__(self, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
    """Initialise daemon stream redirection targets and load the server
    configuration, database connection, pidfile and telnet port.

    :param stdin: path the daemon's stdin is redirected to
    :param stdout: path the daemon's stdout is redirected to
    :param stderr: path the daemon's stderr is redirected to
    """
    self.data = {}
    self.stdin = stdin
    self.stdout = stdout
    self.stderr = stderr
    # Configuration import
    config = ConfigParser()
    # Close the handle when done (the original leaked it).
    # NOTE(review): binary mode with readfp() only works on Python 2;
    # Python 3 expects a text stream here — confirm target runtime.
    with open(CURRENT_DIR + "include/" + "mong.conf", "rb") as conf_file:
        config.readfp(conf_file)
    # Append to data stack
    # self.pidfile = config.get('Server', 'pidfile')
    # Database initialization
    host = config.get("Database", "host")
    user = config.get("Database", "user")
    passwd = config.get("Database", "passwd")
    dbname = config.get("Database", "dbname")
    db = dbApi(host, user, passwd, dbname)
    # Setting up the pid file
    pidfile = config.get("Server", "pidfile")
    self.pidfile = pidfile
    # Setting up the port for the telnet server
    port = config.get("Server", "port")
    self.port = int(port)
    # Append to data stack
    self.data["database"] = db
    self.data["config"] = config
    self.data["pidfile"] = pidfile
    self.data["port"] = port
def main():
    """Entry point: load the voice and feature config, then dispatch on the
    command-line switch."""
    usage = ("USAGE: ttslab_make_wordunits.py VOICEFILE FEATSCONF "
             "[auto | make_features | make_catalogue]")
    try:
        voicefile = sys.argv[1]
        featconfpath = sys.argv[2]
        switch = sys.argv[3]
    except IndexError:
        print(usage)
        sys.exit()
    voice = ttslab.fromfile(voicefile)
    with open(featconfpath) as conffh:
        featconfig = ConfigParser()
        featconfig.readfp(conffh)
    # Map each recognised switch to its action; anything else is a CLI error.
    actions = {"auto": lambda: auto(featconfig, voice),
               "make_features": lambda: make_features(featconfig),
               "make_catalogue": lambda: make_catalogue(voice)}
    try:
        if switch in actions:
            actions[switch]()
        else:
            raise CLIException
    except CLIException:
        print(usage)
def load_conf(path=None, file=None):
    """Load configuration in global var CONF

    :param path: The path to the configuration file
    :param file: If provided read instead the file like object
    """
    parser = ConfigParser()
    # File-like object takes precedence over any path.
    if file:
        try:
            parser.read_file(file, path)
        except AttributeError:
            # read_file only exists in Python3
            parser.readfp(file)
        return parser
    # Fall back to an explicit path, then the environment variable.
    confpath = path or os.environ.get('DULWICH_SWIFT_CFG')
    if confpath is None:
        raise Exception("You need to specify a configuration file")
    if not os.path.isfile(confpath):
        raise Exception("Unable to read configuration file %s" % confpath)
    parser.read(confpath)
    return parser
def load_ini_config(): try: # Python 2 from ConfigParser import ConfigParser from StringIO import StringIO except ImportError: # Python 3 from configparser import ConfigParser from io import StringIO # By using `allow_no_value=True` we are allowed to # write `--force` instead of `--force=true` below. config = ConfigParser(allow_no_value=True) # Pretend that we load the following INI file: source = ''' [default-arguments] --force --baud=19200 <host>=localhost ''' # ConfigParser requires a file-like object and # no leading whitespace. config_file = StringIO('\n'.join(source.split())) config.readfp(config_file) # ConfigParsers sets keys which have no value # (like `--force` above) to `None`. Thus we # need to substitute all `None` with `True`. return dict((key, True if value is None else value) for key, value in config.items('default-arguments'))
def write_last_synced(self, _value, _job, _new, _smb_connection=None):
    """
    Write _value to the last_synced property of the destination status file
    :param _value: The value to write
    :param _job: A SyncJob instance
    :param _new: If there isn't already a history-section
    :param _smb_connection: An SMB connection.
    :return:
    """
    # Existing status file: load it over SMB or from the local disk.
    if _new is None:
        if _smb_connection is not None:
            _file_obj = read_string_file_smb(_smb_connection,
                                             os.path.join(_job.destination_folder, 'ofs_status.txt'))
        else:
            try:
                _file_obj = open(os.path.join(_job.destination_folder, 'ofs_status.txt'), "r")
            except IOError:
                # NOTE(review): on IOError _file_obj is never bound, so the
                # readfp() call below raises NameError — confirm intent.
                pass
    else:
        # New history section requested: start from an empty in-memory file.
        _file_obj = StringIO.StringIO()
    _cfg = ConfigParser()
    _cfg.readfp(_file_obj)
    if _new is not None:
        _cfg.add_section("history")
    _cfg.set("history", "last_synced", _value)
    # NOTE(review): when _file_obj came from open(..., "r") it is read-only,
    # so this write() fails; presumably only the SMB/StringIO paths are
    # exercised by callers — confirm.
    _cfg.write(_file_obj)
    if _smb_connection is not None:
        write_string_file_smb(_smb_connection,
                              os.path.join(_job.destination_folder, 'ofs_status.txt'),
                              _file_obj)
    else:
        try:
            # NOTE(review): opened for reading and the handle is discarded;
            # looks like this branch should write _file_obj back to disk —
            # TODO confirm against callers.
            _file = open(os.path.join(_job.destination_folder, 'ofs_status.txt'), "r")
        except IOError:
            pass
def load_options():
    """Parse the upc config file; print a template and exit(2) when absent."""
    if not os.path.exists(conf_path):
        print("Sorry but I can't find the config file. Please fill the "
              "following template and save it to %s" % conf_path)
        print("""
; Sample upc config file
; The below sample bucket section shows all possible config values,
; create one or more 'real' bucket sections to be able to control them under
; upc.
;[bucket_name]
;username=foo
;password=bar
;endpoint=
;timeout=
;chunksize=
;[more_buckets]
; your bucket name
;..""")
        sys.exit(2)
    options = ConfigParser()
    with open(conf_path, 'r') as f:
        options.readfp(f)
    return options
def _makeInstance(self, id, portal_type, subdir, import_context):
    """Construct a content object of *portal_type* named *id* in the
    context folder, applying title/description from the exported
    .properties file when present.

    Returns the new object, or None when the type is invalid."""
    folder = self.context
    properties = import_context.readDataFile('.properties',
                                             '%s/%s' % (subdir, id))
    types_tool = getToolByName(folder, 'portal_types')
    try:
        types_tool.constructContent(portal_type, folder, id)
    except ValueError:
        # invalid type
        return None
    content = folder._getOb(id)
    if properties is None:
        return content
    # Normalise line endings before feeding the INI text to the parser.
    stream = StringIO('\n'.join(properties.splitlines()))
    parser = ConfigParser(defaults={'title': '', 'description': 'NONE'})
    parser.readfp(stream)
    content.setTitle(parser.get('DEFAULT', 'title'))
    content.setDescription(parser.get('DEFAULT', 'description'))
    return content
def process_l10n_ini(inifile):
    """Read a Mozilla l10n.ini file and process it to find the localisation
    files needed by a project.

    Copies each configured directory's en-US locale tree into the l10n
    checkout, then recurses into any included ini files.
    """
    l10n = ConfigParser()
    # Close the ini file when done instead of leaking the handle.
    with open(path_neutral(inifile)) as ini_file:
        l10n.readfp(ini_file)
    l10n_ini_path = os.path.dirname(inifile)
    for dir in l10n.get('compare', 'dirs').split():
        frompath = os.path.join(l10n_ini_path, l10n.get('general', 'depth'),
                                dir, 'locales', 'en-US')
        if verbose:
            # print() with a single argument works on Python 2 and 3 alike.
            print('%s -> %s' % (frompath,
                                os.path.join(l10ncheckout, 'en-US', dir)))
        try:
            shutil.copytree(frompath, os.path.join(l10ncheckout, 'en-US', dir))
        except OSError:
            print('ERROR: %s does not exist' % frompath)
    # Follow [includes] entries recursively; a missing section or a
    # non-iterable options() result simply ends the recursion.
    try:
        for include in l10n.options('includes'):
            include_ini = os.path.join(l10n_ini_path,
                                       l10n.get('general', 'depth'),
                                       l10n.get('includes', include))
            if os.path.isfile(include_ini):
                process_l10n_ini(include_ini)
    except TypeError:
        pass
    except NoSectionError:
        pass
def get_classes_from_config_file(config_path="~/.openerp_serverrc"):
    """Scan every addons folder listed in the OpenERP server config and
    collect, per addon: its manifest, its model classes and its XML
    views/actions.

    :param config_path: path to the OpenERP server rc file (``~`` expanded).
    :return: dict mapping addon name -> {"__openerp__", "models", "xml"}.
    """
    addons = {}
    config_parser = ConfigParser()
    config_parser.readfp(open(os.path.expanduser(config_path)))
    # addons_path is a comma-separated list of directories.
    addons_folders = map(lambda x: x.strip(),
                         config_parser.get("options", "addons_path").split(","))
    for addons_folder in addons_folders:
        addons_folder = path(addons_folder)
        for addon in addons_folder.dirs():
            addons[addon.name] = {}
            # SECURITY NOTE: eval() on manifest file contents executes
            # arbitrary code from disk — only safe on trusted addon trees.
            if addon.joinpath("__openerp__.py").exists():
                addons[addon.name]["__openerp__"] = eval(open(addon.joinpath("__openerp__.py"), "r").read())
            elif addon.joinpath("__terp__.py").exists():
                # Legacy manifest name used by older OpenERP releases.
                addons[addon.name]["__openerp__"] = eval(open(addon.joinpath("__terp__.py"), "r").read())
            else:
                # No manifest: not a real addon, drop the entry.
                del addons[addon.name]
                continue
            addons[addon.name]["models"] = {}
            for python_file in addon.walk("*.py"):
                # Skip private/dunder modules such as __init__.py.
                if python_file.name.startswith("_"):
                    continue
                models = get_classes_from_string(open(python_file).read())
                for model in models.keys():
                    models[model]["file"] = python_file
                addons[addon.name]["models"].update(models)
            addons[addon.name]["xml"] = {"views": {}, "actions": {}}
            for xml_file in addon.walk("*.xml"):
                xml = get_views_from_string(open(xml_file, "r").read())
                addons[addon.name]["xml"]["views"].update(xml["views"])
                addons[addon.name]["xml"]["actions"].update(xml["actions"])
    return addons
def render_admin_panel(self, req, cat, page, path_info):
    """Admin panel handler: validate and save the repository's hgrc
    (authz) file posted from the web form.

    Requires TRAC_ADMIN permission; raises TracError on access, syntax
    or write failures.
    """
    assert req.perm.has_permission('TRAC_ADMIN')
    # get default authz file from trac.ini
    repository_dir = self.config.get('trac', 'repository_dir') + '/.hg/hgrc'
    repository_type = self.config.get('trac', 'repository_type')
    # test if authz file exists and is writable
    if not os.access(repository_dir, os.W_OK | os.R_OK):
        raise TracError("Can't access repository %s" % repository_dir)
    # evaluate forms
    if req.method == 'POST':
        current = req.args.get('current').strip().replace('\r', '')
        # encode to utf-8
        current = current.encode('utf-8')
        # parse and validate authz file with a config parser
        from ConfigParser import ConfigParser
        from StringIO import StringIO
        cp = ConfigParser()
        try:
            cp.readfp(StringIO(current))
        except Exception as e:  # fix: py3-compatible except syntax
            raise TracError("Invalid Syntax: %s" % e)
        # write to disk (with-block closes the handle even on error;
        # the original also clobbered `current` with write()'s result)
        try:
            with open(repository_dir, 'wb') as fp:
                fp.write(current)
        except Exception as e:
            raise TracError("Can't write authz file: %s" % e)
def __load_plugin(self, name, **kwargs):
    """Import plugins.<name>, instantiate its Plugin class (with the
    plugin's own plugin.cfg when present) and register its capabilities.

    :param name: plugin package name under ``plugins/``.
    :return: True on success, False on any failure (logged to stderr).
    """
    kwargs['verbose'] = conf.VERBOSE
    kwargs['debug'] = conf.DEBUG
    try:
        self.__plugins = __import__('plugins.' + name, globals(), locals(), [], -1)
        try:
            # Optional per-plugin configuration file.
            config = ConfigParser()
            config.readfp(open("plugins/" + name + "/plugin.cfg"))
            kwargs['config'] = config
            # NOTE: eval() of a constructed expression — only safe because
            # `name` comes from the trusted local plugins directory.
            plugin = eval('plugins.' + name + '.' + 'Plugin(**kwargs)', globals(), {"plugins": self.__plugins, "kwargs": kwargs})
            del(kwargs['config'])
        except IOError as e:
            # No plugin.cfg: construct the plugin without arguments.
            plugin = eval('plugins.' + name + '.' + 'Plugin()', globals(), {"plugins": self.__plugins})
        # Parallel capability lists: name, instance, then flags for each
        # optional method the plugin implements, plus a per-plugin slot.
        self.__functions[0].append(name)
        self.__functions[1].append(plugin)
        self.__functions[2].append(hasattr(plugin, 'cmd') and isinstance(plugin.cmd, MethodType))
        self.__functions[3].append(hasattr(plugin, 'listen') and isinstance(plugin.listen, MethodType))
        self.__functions[4].append(hasattr(plugin, 'help') and isinstance(plugin.help, MethodType))
        self.__functions[5].append(hasattr(plugin, 'stop') and isinstance(plugin.stop, MethodType))
        self.__functions[6].append([])
        return True
    except Exception as e:
        # Any import/instantiation failure is reported, never raised.
        stderr.write(repr(e) + "\n")
        return False
def __init__(self):
    """Wire up URL routes, template lookup, database and the mirror pool."""
    routes = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler,
         {"path": os.path.join(os.path.dirname(__file__), "static")}),
    ]
    super(Application, self).__init__(routes, debug=options.debug)
    cfg = ConfigParser()
    cfg.readfp(open(options.config))
    # One global connection
    init_database(create_engine(cfg.get('database', 'uri')))
    self.db = DBSession
    tmpl_dir = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[tmpl_dir],
                                 filesystem_checks=False)
    self.mirrorpool = WeightedChoice((
        ('http://mirror.galliumos.org/%s', 1000),
    ))
def executable_check(rootdir, outputdir, input_path, verbose=False):
    """Run the ltp_test binary once over *input_path* as a dynamic check.

    :return: True when ltp_test was found and executed; False when the
        binary is missing (all dynamic checks are then skipped).
    """
    ltp_test_exe = os.path.join(rootdir, "bin/ltp_test")
    if not which(ltp_test_exe):
        logging.error("ltp_test: ltp_test is not executable.")
        logging.info("ltp_test: all dynamic checks are skipped.")
        return False
    # ltp.cnf has no section header; prepend [root] so ConfigParser accepts it.
    original_config_path = os.path.join(os.path.join(rootdir, "conf"), "ltp.cnf")
    with open(original_config_path, "r") as original_config:
        cfg_str = '[root]\n' + original_config.read()
    cfg = ConfigParser()
    cfg.readfp(StringIO.StringIO(cfg_str))
    # Write a temporary config with model paths absolutized against rootdir.
    config_path = "/tmp/ltp.autotest.ltp.conf"
    cofs = open(config_path, "w")
    cofs.write("target = all\n")  # was `print >> cofs` (Python 2 only)

    def concatenate(name):
        # Emit `name = value`, absolutizing relative paths against rootdir.
        model = cfg.get("root", name)
        if not model.startswith("/"):
            cofs.write("%s = %s\n" % (name, os.path.join(rootdir, model)))

    concatenate("segmentor-model")
    concatenate("postagger-model")
    concatenate("parser-model")
    concatenate("ner-model")
    concatenate("srl-data")
    cofs.close()
    command = [ltp_test_exe, config_path, "srl", input_path]
    logging.info("ltp_test: dynamically executable check is running.")
    ofs = open(os.path.join(outputdir, "output.txt"), "w")
    subprocess.call(command, stdout=ofs, stderr=DUMMY)
    ofs.close()
    logging.info("ltp_test: dynamically executable check is done.")
    return True
def __init__(self):
    """Wire up URL routes, template lookup, database and the mirror pool."""
    routes = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/rss", RssHandler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler,
         {"path": os.path.join(os.path.dirname(__file__), "static")}),
    ]
    super(Application, self).__init__(routes, debug=options.debug)
    cfg = ConfigParser()
    cfg.readfp(open(options.config))
    # One global connection
    init_database(create_engine(cfg.get('database', 'uri')))
    self.db = DBSession
    tmpl_dir = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[tmpl_dir],
                                 filesystem_checks=False)
    # Download mirrors, weighted for load distribution.
    self.mirrorpool = WeightedChoice((
        ('http://mirror.slc.cyanogenmod.org/%s', 2000),
        ('http://oss.reflected.net/%s', 1000),
        ('http://mirror.symnds.com/software/cm/%s', 1000),
        ('http://mirror.netcologne.de/cyanogenmod/%s', 200),
        ('http://cm.sponsored.cb-webhosting.de/%s', 25),
        ('http://mirror.i3d.net/pub/cyanogenmod/%s', 50),
    ))
def __init__(self):
    """Wire up URL routes, template lookup, database and the mirror pool."""
    routes = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler,
         {"path": "/opt/www/mirror"}),
        (r"/get/(.*)", Base62Handler),
        (r"/rss", RssHandler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
    ]
    super(Application, self).__init__(routes, debug=options.debug)
    cfg = ConfigParser()
    cfg.readfp(open(options.config))
    # One global connection
    init_database(create_engine(cfg.get('database', 'uri')))
    self.db = DBSession
    tmpl_dir = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[tmpl_dir])
    self.activebuilds = ActiveBuilds()
    #self.stats = Stats()
    # Download mirrors, weighted for load distribution.
    self.mirrorpool = WeightedChoice((
        ('http://oss.reflected.net/%s', 1000),
        ('http://mirror.sea.tdrevolution.net/%s', 500),
        ('http://cm.sponsored.cb-webhosting.de/%s', 50),
        ('http://mirror.netcologne.de/cyanogenmod/%s', 75),
    ))
def read_last_synced(self, _job, _smb_connection=None):
    """
    Read the date and time from the last_synced property of the destination
    status file
    :param _job: A SyncJob instance
    :param _smb_connection: An SMB connection.
    :return: parsed datetime, or None when the status file is missing
    """
    _cfg = ConfigParser()
    _file_obj = None
    # Is it a remote host?
    if _smb_connection is not None:
        _file_obj = read_string_file_smb(
            _smb_connection,
            os.path.join(_job.destination_folder, 'ofs_status.txt'))
    else:
        try:
            _file_obj = open(os.path.join(_job.destination_folder, 'ofs_status.txt'), "r")
        except IOError:
            _file_obj = None
    if _file_obj:
        # read_file replaces readfp(), which was removed in Python 3.12.
        if hasattr(_cfg, 'read_file'):
            _cfg.read_file(_file_obj)
        else:
            _cfg.readfp(_file_obj)
        _file_obj.close()
        _result = _cfg.get("history", "last_synced")
        # NOTE(review): ConfigParser.get raises NoSectionError/NoOptionError
        # rather than returning None when the key is absent — confirm
        # whether callers rely on that propagating.
        if _result is not None:
            return datetime.datetime.strptime(_result, "%Y-%m-%d %H:%M:%S.%f")
    return None
def __init__(self, start_urls=None, max_links=500, max_offsite_ttl=10, max_depth=10, db_conn=None, *args, **kwargs):
    """Initialise the discovery spider: crawl limits, link regex and the
    start-url list derived from known table sources.

    NOTE(review): the `start_urls` and `db_conn` parameters are accepted
    but never used here — presumably consumed by the parent class or kept
    for interface compatibility; confirm.
    """
    # Initialize parent class
    super(DiscoverSpider, self).__init__(*args, **kwargs)
    cfg = ConfigParser()
    cfg.readfp(open(os.path.join(os.path.dirname(__file__), '..', 'scrapy.properties')))
    # Now our local initialization
    # Expression used to find links not inside @href's
    self.link_pattern = re.compile('http://[^"\'><\n\t ]+')
    # Read urls from either the database or the option
    # NOTE(review): self.tbl_src is read here but never assigned in this
    # method — assumed to be set by the parent __init__; confirm.
    self.start_urls = ["http://%s" % k for k in self.tbl_src if "." in k]
    #~ self.site = [urlsplit(url).netloc for url in self.start_urls][0].replace("www.","")
    self.max_links = max_links  # max num of links to follow per page
    self.max_offsite_ttl = max_offsite_ttl  # time-to-live jumps offsite
    self.max_depth = max_depth  # max depth it will go
    self.torrent_stores = cfg.get("parameters", "torrent_stores").split(",")
    # Drop any torrent store host from the start-url list.
    # NOTE(review): the join/replace/split round-trip only removes the
    # store when it is not the last element (relies on the trailing
    # comma) — confirm this is intended.
    for store in self.torrent_stores:
        if "http://%s" % store in self.start_urls:
            self.start_urls = ",".join(self.start_urls).replace("http://%s," % store, "").split(",")
    # ensure table domain
    for src in self.tbl_src:
        self.save_domain(src)
def load(app):
    """
    Load the configuration and apply it to the app.

    :param app: a flask app
    :type app: Flask
    :raises IOError: iff a non-default config path is specified but does not
        exist
    """
    # Package-bundled defaults first; failure to read them is fatal.
    try:
        with pkg_resources.resource_stream(__package__, DEFAULT_CONFIG_RESOURCE) as default_config:
            defaults_parser = ConfigParser()
            defaults_parser.readfp(default_config)
            read_config(app, defaults_parser)
    except IOError:
        _logger.error('could not open default config file')
        raise
    # User overrides: $CONFIG_ENV_NAME wins over the default path.
    config_path = os.environ.get(CONFIG_ENV_NAME) or CONFIG_PATH
    try:
        with open(config_path) as config_file:
            user_parser = ConfigParser()
            user_parser.readfp(config_file)
            read_config(app, user_parser)
            _logger.info('config loaded from %s' % config_path)
    except IOError:
        if config_path != CONFIG_PATH:
            # An explicitly requested file that is missing is an error.
            _logger.error('config file not found at path %s' % config_path)
            raise
        # if the user did not specify a config path and there is not a file
        # at the default path, just use the default settings.
        _logger.info('no config specified or found, so using defaults')
class RepoConfig(object):
    '''
    This class provides an abstraction around repository configuration files
    '''
    def __init__(self, path):
        '''
        :param path: Plumbum path to the repository
        '''
        self._config = ConfigParser()
        self._path = path / '.hg' / 'hgrc'
        if self._path.exists():
            # Close the handle when done instead of leaking it.
            with self._path.open() as config_file:
                self._config.readfp(config_file)
        else:
            self._config.add_section('paths')

    def AddRemote(self, name, destination):
        '''
        Adds a remote to the config, or overwrites if it already exists

        :param name: The name of the remote
        :param destination: The destination path of the remote
        '''
        self._config.set('paths', name, destination)
        # Flush and close the hgrc on every update.
        with self._path.open('w') as config_file:
            self._config.write(config_file)

    @property
    def remotes(self):
        '''
        Property to get a dictionary of remotes
        '''
        return dict(self._config.items('paths'))
def test_export_site_with_subfolders(self):
    """Exporting a site with subfolders writes an .objects/.properties pair
    for the site plus one pair per subfolder, in a fixed order."""
    from Products.GenericSetup.utils import _getDottedName
    self._setUpAdapters()
    FOLDER_IDS = ('foo', 'bar', 'baz')
    site = _makeFolder('site')
    site.title = 'AAA'
    site._setProperty('description', 'BBB')
    # An unattached folder only exists to resolve the dotted class name.
    aside = _makeFolder('aside')
    dotted = _getDottedName(aside.__class__)
    for id in FOLDER_IDS:
        folder = _makeFolder(id)
        folder.title = 'Title: %s' % id
        site._setObject(id, folder)
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)
    # Two site-level files + (.objects, .properties) per subfolder.
    self.assertEqual(len(context._wrote), 2 + (2 * len(FOLDER_IDS)))
    # Entry 0: the site's objects listing, one row per subfolder.
    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    objects = [x for x in reader(StringIO(text))]
    self.assertEqual(len(objects), 3)
    for index in range(len(FOLDER_IDS)):
        id = FOLDER_IDS[index]
        self.assertEqual(objects[index][0], id)
        self.assertEqual(objects[index][1], dotted)
        # Each subfolder contributes an empty .objects ...
        filename, text, content_type = context._wrote[2 + (2 * index)]
        self.assertEqual(filename, '/'.join(('structure', id, '.objects')))
        self.assertEqual(content_type, 'text/comma-separated-values')
        subobjects = [x for x in reader(StringIO(text))]
        self.assertEqual(len(subobjects), 0)
        # ... and a .properties carrying only its title.
        filename, text, content_type = context._wrote[2 + (2 * index) + 1]
        self.assertEqual(filename, '/'.join(('structure', id, '.properties')))
        self.assertEqual(content_type, 'text/plain')
        parser = ConfigParser()
        parser.readfp(StringIO(text))
        defaults = parser.defaults()
        self.assertEqual(len(defaults), 1)
        self.assertEqual(defaults['title'], 'Title: %s' % id)
    # Entry 1 (checked last): the site's own properties.
    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    parser.readfp(StringIO(text))
    defaults = parser.defaults()
    self.assertEqual(len(defaults), 2)
    self.assertEqual(defaults['title'], 'AAA')
    self.assertEqual(defaults['description'], 'BBB')
def _get_attach_points(self, info, size_request): has_attach_points_, attach_points = info.get_attach_points() attach_x = attach_y = 0 if attach_points: # this works only for Gtk < 3.14 # https://developer.gnome.org/gtk3/stable/GtkIconTheme.html # #gtk-icon-info-get-attach-points attach_x = float(attach_points[0].x) / size_request attach_y = float(attach_points[0].y) / size_request elif info.get_filename(): # try read from the .icon file icon_filename = info.get_filename().replace('.svg', '.icon') if os.path.exists(icon_filename): try: with open(icon_filename) as config_file: cp = ConfigParser() cp.readfp(config_file) attach_points_str = cp.get('Icon Data', 'AttachPoints') attach_points = attach_points_str.split(',') attach_x = float(attach_points[0].strip()) / 1000 attach_y = float(attach_points[1].strip()) / 1000 except Exception as e: logging.exception('Exception reading icon info: %s', e) return attach_x, attach_y
def test_export_empty_site_with_setup_tool(self):
    """A site containing only a setup tool exports just the two structure
    metadata files — the tool itself is not exported."""
    self._setUpAdapters()
    site = _makeFolder('site')
    site._setObject('setup_tool', self._makeSetupTool())
    site._updateProperty('title', 'test_export_empty_site_with_setup_tool')
    site._setProperty('description',
                      'Testing export of an empty site with setup tool.')
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)
    self.assertEqual(len(context._wrote), 2)
    # First entry: the (empty) objects listing.
    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    rows = list(reader(StringIO(text)))
    self.assertEqual(len(rows), 0)
    # Second entry: the site properties in INI form.
    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    parser.readfp(StringIO(text))
    defaults = parser.defaults()
    self.assertEqual(len(defaults), 2)
    self.assertEqual(defaults['title'], site.title)
    self.assertEqual(defaults['description'], site.description)
def load_config(filename, defaults=globals(), verbose=False):
    """Load detection limits and kernel sizes from an INI file.

    Each known section is read as ints; a missing section falls back to
    ``defaults[section]``.

    :param filename: path of the INI file to read
    :param defaults: mapping used for missing sections
        (NOTE: the default binds this module's globals() at import time)
    :param verbose: print a confirmation line when True
    :return: dict of section name -> list of ints, int, or default value
    """
    conf = ConfigParser()
    # Close the handle when done; read_file replaces readfp(), which was
    # removed in Python 3.12.
    with open(filename) as conf_file:
        if hasattr(conf, 'read_file'):
            conf.read_file(conf_file)
        else:
            conf.readfp(conf_file)
    if verbose:
        print('Load config from %s file !' % filename)
    # Load min/max
    minmax = ('min', 'max')
    minmax4 = ('xmin', 'xmax', 'ymin', 'ymax')
    configuration = {}
    for section, kind in (
            ('zone_limits', minmax4),
            ('carea_limits', minmax),
            ('width_limits', minmax),
            ('height_limits', minmax),
            ('threshold_limits', minmax),
            ('approx_poly_length', minmax),
            ('contour_limits', minmax),
            ('erode_kernel', 'value'),
            ('dilate_kernel', 'value')):
        try:
            # isinstance instead of type() == comparison (idiomatic).
            if isinstance(kind, tuple):
                configuration[section] = [
                    int(conf.get(section, val)) for val in kind
                ]
            elif isinstance(kind, str):
                configuration[section] = int(conf.get(section, kind))
        except NoSectionError:
            configuration[section] = defaults[section]
    return configuration
def test_export_empty_site(self):
    """Exporting an empty site writes only an empty .objects listing and a
    .properties file carrying the title."""
    self._setUpAdapters()
    site = _makeFolder('site')
    site.title = 'test_export_empty_site'
    site.description = 'Testing export of an empty site.'
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)
    self.assertEqual(len(context._wrote), 2)
    # First entry: the (empty) objects listing.
    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    rows = list(reader(StringIO(text)))
    self.assertEqual(len(rows), 0)
    # Second entry: the site properties in INI form.
    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    parser.readfp(StringIO(text))
    defaults = parser.defaults()
    self.assertEqual(len(defaults), 1)
    self.assertEqual(defaults['title'], site.title)
def test_export_site_with_non_exportable_simple_items(self):
    """Items without an export adapter appear in .objects but produce no
    per-item files."""
    self._setUpAdapters()
    ITEM_IDS = ('foo', 'bar', 'baz')
    site = _makeFolder('site', site_folder=True)
    site.title = 'AAA'
    site.description = 'BBB'
    for item_id in ITEM_IDS:
        site._setObject(item_id, _makeItem(item_id))
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)
    self.assertEqual(len(context._wrote), 2)
    # Objects listing: one row per item, tagged with the content type.
    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    rows = list(reader(StringIO(text)))
    self.assertEqual(len(rows), 3)
    for row, item_id in zip(rows, ITEM_IDS):
        self.assertEqual(row[0], item_id)
        self.assertEqual(row[1], TEST_CONTENT)
    # Site properties in INI form.
    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    parser.readfp(StringIO(text))
    self.assertEqual(parser.get('DEFAULT', 'title'), 'AAA')
    self.assertEqual(parser.get('DEFAULT', 'description'), 'BBB')
def main():
    """Parse CLI options, assemble a CSSConfig and run the spritemapper."""
    opts, args = op.parse_args()
    logging.basicConfig(level=logging.DEBUG if opts.verbose else logging.INFO)
    if not args:
        op.error("you must provide at least one css file")
    css_cls = InMemoryCSSFile if opts.in_memory else CSSFile
    # Config layering: ini file first, then CLI overrides on top.
    base = {}
    if opts.conf:
        from ConfigParser import ConfigParser
        cp = ConfigParser()
        with open(opts.conf) as fp:
            cp.readfp(fp)
        base.update(cp.items("spritemapper"))
    if opts.anneal:
        base["anneal_steps"] = opts.anneal
    if opts.padding:
        base["padding"] = (opts.padding, opts.padding)
    conf = CSSConfig(base=base)
    spritemap([css_cls.open_file(fn, conf=conf) for fn in args], conf=conf)
def _find_bundles():
    """Scan all system data dirs for Sugar activity bundles and record each
    bundle's icon path in the global bundle_icons dict."""
    global bundle_icons
    info_files = []
    for root in GLib.get_system_data_dirs():
        info_files += glob.glob(os.path.join(root, 'sugar', 'activities',
                                             '*.activity', 'activity',
                                             'activity.info'))
    for path in info_files:
        cp = ConfigParser()
        # Close the handle when done (the original leaked it).
        # NOTE(review): binary mode with readfp() only works on Python 2;
        # Python 3 expects a text stream here — confirm target runtime.
        with open(path, 'rb') as fd:
            cp.readfp(fd)
        section = 'Activity'
        if cp.has_option(section, 'bundle_id'):
            bundle_id = cp.get(section, 'bundle_id')
        else:
            # An activity.info without a bundle_id is unusable; skip it.
            continue
        if cp.has_option(section, 'icon'):
            icon = cp.get(section, 'icon')
            dirname = os.path.dirname(path)
            bundle_icons[bundle_id] = os.path.join(dirname, icon + '.svg')
def read_config(self):
    """Parse self.config_file; report via print_shell when it is missing.

    NOTE(review): the parsed config object is discarded — presumably a
    subclass override or a truncated snippet; confirm intent.
    """
    config = ConfigParser()
    try:
        # with-block closes the handle; read_file replaces readfp(),
        # which was removed in Python 3.12.
        with open(self.config_file, 'r') as cfg:
            if hasattr(config, 'read_file'):
                config.read_file(cfg)
            else:
                config.readfp(cfg)
    except IOError as e:  # fix: `except IOError, e` is Python-2-only syntax
        self.print_shell("Error: config_file not found", e)
def test_export_site_with_exportable_simple_items(self):
    """INI-aware items are listed in .objects and each exported as its own
    structure/<id>.ini file."""
    self._setUpAdapters()
    ITEM_IDS = ('foo', 'bar', 'baz')
    site = _makeFolder('site', site_folder=True)
    site.title = 'AAA'
    site.description = 'BBB'
    for item_id in ITEM_IDS:
        site._setObject(item_id, _makeINIAware(item_id))
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)
    self.assertEqual(len(context._wrote), 2 + len(ITEM_IDS))
    # Objects listing: one row per item, tagged with the INI-aware type.
    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    rows = list(reader(StringIO(text)))
    self.assertEqual(len(rows), 3)
    for index, item_id in enumerate(ITEM_IDS):
        self.assertEqual(rows[index][0], item_id)
        self.assertEqual(rows[index][1], TEST_INI_AWARE)
        # Per-item .ini payload matches the object's own serialisation.
        filename, text, content_type = context._wrote[index + 2]
        self.assertEqual(filename, 'structure/%s.ini' % item_id)
        obj = site._getOb(item_id)
        self.assertEqual(text.strip(), obj.as_ini().strip())
        self.assertEqual(content_type, 'text/plain')
    # Site properties in INI form.
    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    parser = ConfigParser()
    parser.readfp(StringIO(text))
    self.assertEqual(parser.get('DEFAULT', 'title'), 'AAA')
    self.assertEqual(parser.get('DEFAULT', 'description'), 'BBB')
def loadSubs(self, filename):
    """Load a substitutions file.

    The file must be in the Windows-style INI format (see the standard
    ConfigParser module docs for information on this format). Each section
    of the file is loaded into its own substituter.
    """
    # open() + context manager instead of the removed file() builtin;
    # also guarantees the handle is closed on parse errors.
    parser = ConfigParser()
    with open(filename) as in_file:
        parser.readfp(in_file, filename)
    for s in parser.sections():
        # Add a new WordSub instance for this section. If one already
        # exists, delete it.
        if s in self._subbers:  # dict.has_key() was removed in Python 3
            del self._subbers[s]
        self._subbers[s] = WordSub()
        # iterate over the key,value pairs and add them to the subber
        for k, v in parser.items(s):
            self._subbers[s][k] = v
def process_l10n_ini(inifile):
    """Read a Mozilla l10n.ini file and process it to find the localisation
    files needed by a project"""
    l10n = ConfigParser()
    l10n.readfp(open(path_neutral(inifile)))
    ini_dir = os.path.dirname(inifile)
    depth = l10n.get('general', 'depth')
    for directory in l10n.get('compare', 'dirs').split():
        source = os.path.join(ini_dir, depth, directory, 'locales', 'en-US')
        target = os.path.join(l10ncheckout, 'en-US', directory)
        # Skip missing sources and already-copied targets.
        if not os.path.exists(source):
            if verbose:
                print("[Missing source]: %s" % source)
            continue
        if os.path.exists(target):
            if verbose:
                print("[Existing target]: %s" % target)
            continue
        if verbose:
            print('%s -> %s' % (source, target))
        try:
            shutil.copytree(source, target)
        except OSError as e:
            print(e)
    # Recurse into [includes]; missing section or non-iterable options()
    # simply ends the recursion.
    try:
        for include in l10n.options('includes'):
            include_ini = os.path.join(ini_dir, depth,
                                       l10n.get('includes', include))
            if os.path.isfile(include_ini):
                process_l10n_ini(include_ini)
    except TypeError:
        pass
    except NoSectionError:
        pass
def __init__(self, start_urls=None, max_links=200, max_offsite_ttl=1, max_depth=50, db_conn=None, *args, **kwargs):
    """Initialise the site spider: crawl limits, link regex and the target
    site derived from the mandatory comma-separated start_urls.

    :raises Exception: when start_urls is None.
    """
    # Initialize parent class
    super(SiteSpider, self).__init__(*args, **kwargs)
    cfg = ConfigParser()
    # Close the properties file when done instead of leaking the handle.
    with open(os.path.join(os.path.dirname(__file__), '..',
                           'scrapy.properties')) as props:
        cfg.readfp(props)
    # Now our local initialization
    # Expression used to find links not inside @href's
    self.link_pattern = re.compile('http://[^"\'><\n\t ]+')
    # Read urls from either the database or the option
    if start_urls is None:
        raise Exception("start_urls None not permited")
    self.start_urls = start_urls.split(',')
    # Site host of the first start URL, without the www. prefix.
    self.site = [urlsplit(url).netloc
                 for url in self.start_urls][0].replace("www.", "")
    self.max_links = max_links  # max num of links to follow per page
    self.max_offsite_ttl = max_offsite_ttl  # time-to-live jumps offsite
    self.max_depth = max_depth  # max depth it will go
    self.torrent_stores = cfg.get("parameters", "torrent_stores").split(",")
    self.main_tbl_src = self.get_table_source(self.site)
    self.load_torrent_pages()
def _process_action(self, action, package_conf): """Process single action from event action list. Gets package boss.conf, parses it and puts the result in package_conf[package_name] """ package = action.get("sourcepackage", None)\ or action.get("deletepackage", None) if not package: # This is not package related action return # In theory sourcepackage and targetpackage can have different names, # but we don't support that. # Guarantee fields.package_conf.<package name> for all packages in # request, even if it is empty. package_conf[package] = {} if action["type"] == "submit": contents = self._get_boss_conf(action["sourceproject"], action["sourcepackage"], action["sourcerevision"]) or "" elif action["type"] == "delete": contents = self._get_boss_conf(action["deleteproject"], action["deletepackage"]) or "" else: self.log.info("Unknown action type '%s'" % action["type"]) return conf = ConfigParser() conf.readfp(StringIO(contents)) for section in conf.sections(): # Create config sections under package package_conf[package][section] = {} for key, value in conf.items(section): # Read key - value pairs in sections package_conf[package][section][key] = value
def proxysql_tools_config(proxysql_instance, cluster_host, cluster_port,
                          cluster_user, cluster_pass, hostgroup_writer,
                          hostgroup_reader, monitor_user, monitor_pass):
    """Render a proxysql-tools configuration and return it parsed.

    Builds the [proxysql]/[galera] ini text from the given connection
    details and returns a ConfigParser loaded with it.
    """
    template = """
[proxysql]
host={proxy_host}
admin_port={proxy_port}
admin_username={proxy_user}
admin_password={proxy_pass}
monitor_username={monitor_user}
monitor_password={monitor_pass}
[galera]
cluster_host={cluster_host}:{cluster_port}
cluster_username={cluster_user}
cluster_password={cluster_pass}
load_balancing_mode=singlewriter
writer_hostgroup_id={writer_hostgroup}
reader_hostgroup_id={reader_hostgroup}
"""
    # Admin credentials come straight from the running proxysql instance.
    values = {
        'proxy_host': proxysql_instance.host,
        'proxy_port': proxysql_instance.port,
        'proxy_user': proxysql_instance.user,
        'proxy_pass': proxysql_instance.password,
        'monitor_user': monitor_user,
        'monitor_pass': monitor_pass,
        'cluster_host': cluster_host,
        'cluster_port': cluster_port,
        'cluster_user': cluster_user,
        'cluster_pass': cluster_pass,
        'writer_hostgroup': hostgroup_writer,
        'reader_hostgroup': hostgroup_reader,
    }
    config_contents = template.format(**values)
    config = ConfigParser()
    config.readfp(io.BytesIO(config_contents))
    return config
def __init__(self):
    """Set up the tornado application: routes, DB session, templates
    and the weighted download-mirror pool.
    """
    # URL routing table.
    handlers = [
        (r"/", BrowseHandler),
        (r"/get/(.*)\.md5sum", SumHandler),
        (r"/get/(.*)\.zip", ZipHandler),
        # CHANGES.txt files are served directly from the mirror tree.
        (r"/get/(.*/CHANGES.txt)", tornado.web.StaticFileHandler, {
            "path": "/opt/www/mirror"
        }),
        (r"/get/(.*)", Base62Handler),
        (r"/rss", RssHandler),
        (r"/api", ApiHandler),
        (r"/mirror", MirrorApplicationHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler, {
            "path": os.path.join(os.path.dirname(__file__), "static")
        }),
    ]
    settings = dict(debug=options.debug, )
    super(Application, self).__init__(handlers, **settings)
    # Application configuration file given on the command line.
    config = ConfigParser()
    config.readfp(open(options.config))
    # One global connection
    init_database(create_engine(config.get('database', 'uri')))
    self.db = DBSession
    # Mako template lookup rooted next to this module.
    template_path = os.path.join(os.path.dirname(__file__), "templates")
    self.lookup = TemplateLookup(directories=[template_path],
                                 filesystem_checks=False)
    # Download mirrors with selection weights; commented entries are
    # deliberately disabled mirrors kept for reference.
    self.mirrorpool = WeightedChoice((
        ('http://oss.reflected.net/%s', 1000),
        ('http://mirror.symnds.com/software/cm/%s', 1000),
        #('http://mirror.netcologne.de/cyanogenmod/%s', 200),
        #('http://mirror.sea.tdrevolution.net/%s', 50),
        ('http://cm.sponsored.cb-webhosting.de/%s', 25),
        #('http://mirror.i3d.net/pub/cyanogenmod/%s', 50),
    ))
def _extract_versions_section(session, filename, version_sections=None, relative=None): sys.stderr.write('\n- {0}'.format(filename)) if (relative is not None and "://" not in filename and not filename.startswith('/') and not filename.startswith(relative)): filename = relative + '/' + filename config = ConfigParser() if os.path.isfile(filename): config.read(filename) else: resp = session.get(filename) config.readfp(StringIO(resp.text)) if resp.from_cache: sys.stderr.write('\n from cache') elif resp.status_code != 200: sys.stderr.write('\n ERROR {0:d}'.format(resp.status_code)) else: sys.stderr.write('\n fresh from server') # first read own versions section if config.has_section('versions'): version_sections[filename] = OrderedDict(config.items('versions')) sys.stderr.write('\n {0:d} entries in versions section.'.format( len(version_sections[filename]))) try: extends = config.get('buildout', 'extends').strip() except (NoSectionError, NoOptionError): return version_sections for extend in reversed(extends.splitlines()): extend = extend.strip() if not extend: continue sub_relative = find_relative(extend) or relative _extract_versions_section(session, extend, version_sections, sub_relative) return version_sections
def load_templates():
    """Populate TEMPLATE_TABLE from the notification template directory.

    Idempotent: returns immediately when the table is already filled.
    The table lock is now released in a ``finally`` so an exception
    anywhere (including inside LOG) can no longer leave it held.
    """
    TEMPLATE_LOCK.acquire()
    try:
        # Already loaded by another thread; nothing to do.
        if len(TEMPLATE_TABLE):
            return
        LOG.debug('Filling in the template table')
        tpl_dir = getattr(settings, 'NOTIFICATION_TEMPLATE_DIR',
                          '/usr/share/openstack-auth-shib/templates')
        try:
            for tpl_item in os.listdir(tpl_dir):
                res_match = TEMPLATE_REGEX.search(tpl_item)
                if not res_match:
                    continue
                locale = res_match.group(1).lower()
                TEMPLATE_TABLE[locale] = dict()
                tpl_filename = os.path.join(tpl_dir, tpl_item)
                parser = ConfigParser()
                # Close the handle instead of leaking it.
                tpl_file = open(tpl_filename)
                try:
                    parser.readfp(tpl_file)
                finally:
                    tpl_file.close()
                for sect in parser.sections():
                    sbj = parser.get(sect, 'subject') \
                        if parser.has_option(sect, 'subject') else "No subject"
                    body = parser.get(sect, 'body') \
                        if parser.has_option(sect, 'body') else "No body"
                    log_tpl = parser.get(sect, 'LOG') \
                        if parser.has_option(sect, 'LOG') else "No log"
                    TEMPLATE_TABLE[locale][sect] = \
                        NotificationTemplate(sbj, body, log_tpl)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # propagate.  TODO: partially-filled TEMPLATE_TABLE is kept;
            # confirm whether it should be cleared on failure.
            LOG.error("Cannot load template table", exc_info=True)
    finally:
        TEMPLATE_LOCK.release()
def compile_liquidsoap(filename="liquidsoap"):
    """Build liquidsoap 1.0.1 from source on the remote host and download
    the resulting binary to the local path ``filename``.

    Reads the source tarball URL from fab_liquidsoap_compile.cfg.
    Assumes a Debian/Ubuntu remote (apt-get) -- TODO confirm.
    """
    config = ConfigParser()
    config.readfp(open('fab_liquidsoap_compile.cfg'))
    url = config.get('main', 'liquidsoap_tar_url')
    print "Will get liquidsoap from " + url
    # Refresh the system and install the OCaml/audio build dependencies.
    do_sudo('apt-get update')
    do_sudo('apt-get upgrade -y --force-yes')
    do_sudo('apt-get install -y --force-yes ocaml-findlib libao-ocaml-dev libportaudio-ocaml-dev ' + \
            'libmad-ocaml-dev libtaglib-ocaml-dev libalsa-ocaml-dev libtaglib-ocaml-dev libvorbis-ocaml-dev ' + \
            'libspeex-dev libspeexdsp-dev speex libladspa-ocaml-dev festival festival-dev ' + \
            'libsamplerate-dev libxmlplaylist-ocaml-dev libxmlrpc-light-ocaml-dev libflac-dev ' + \
            'libxml-dom-perl libxml-dom-xpath-perl patch autoconf libmp3lame-dev ' + \
            'libcamomile-ocaml-dev libcamlimages-ocaml-dev libtool libpulse-dev libjack-dev camlidl')
    root = '/home/martin/src'
    do_run('mkdir -p %s' % root)
    # Fetch the tarball via a temp file, then unpack it under root.
    tmpPath = do_local("mktemp", capture=True)
    do_run('wget %s -O %s' % (url, tmpPath))
    do_run('mv %s %s/liquidsoap.tar.gz' % (tmpPath, root))
    do_run('cd %s && tar xzf liquidsoap.tar.gz' % root)
    # Start from the minimal package set and enable the needed audio
    # backends by un-commenting them in PACKAGES.
    do_run('cd %s/liquidsoap-1.0.1-full && cp PACKAGES.minimal PACKAGES' % root)
    sed('%s/liquidsoap-1.0.1-full/PACKAGES' % root, '#ocaml-portaudio', 'ocaml-portaudio')
    sed('%s/liquidsoap-1.0.1-full/PACKAGES' % root, '#ocaml-alsa', 'ocaml-alsa')
    sed('%s/liquidsoap-1.0.1-full/PACKAGES' % root, '#ocaml-pulseaudio', 'ocaml-pulseaudio')
    # Configure and build, then copy the binary back to the local machine.
    do_run('cd %s/liquidsoap-1.0.1-full && ./bootstrap' % root)
    do_run('cd %s/liquidsoap-1.0.1-full && ./configure' % root)
    do_run('cd %s/liquidsoap-1.0.1-full && make' % root)
    get('%s/liquidsoap-1.0.1-full/liquidsoap-1.0.1/src/liquidsoap' % root, filename)
def test_export_site_with_non_exportable_simple_items(self):
    """Exporting a site whose children have no dedicated exporter should
    still write the .objects listing and the site's .properties file.
    """
    from Products.GenericSetup.utils import _getDottedName
    self._setUpAdapters()
    ITEM_IDS = ('foo', 'bar', 'baz')
    site = _makeFolder('site')
    site.title = 'AAA'
    site._setProperty('description', 'BBB')
    # 'aside' is only used to obtain the items' dotted class name.
    item = _makeItem('aside')
    dotted = _getDottedName(item.__class__)
    for id in ITEM_IDS:
        site._setObject(id, _makeItem(id))
    context = DummyExportContext(site)
    exporter = self._getExporter()
    exporter(context)
    # Exactly two files: the objects CSV and the properties ini.
    self.assertEqual(len(context._wrote), 2)
    filename, text, content_type = context._wrote[0]
    self.assertEqual(filename, 'structure/.objects')
    self.assertEqual(content_type, 'text/comma-separated-values')
    objects = [x for x in reader(StringIO(text))]
    self.assertEqual(len(objects), 3)
    # Each row is (id, dotted class name), in insertion order.
    for index in range(len(ITEM_IDS)):
        self.assertEqual(objects[index][0], ITEM_IDS[index])
        self.assertEqual(objects[index][1], dotted)
    filename, text, content_type = context._wrote[1]
    self.assertEqual(filename, 'structure/.properties')
    self.assertEqual(content_type, 'text/plain')
    # The properties file stores title/description in the ini defaults.
    parser = ConfigParser()
    parser.readfp(StringIO(text))
    defaults = parser.defaults()
    self.assertEqual(len(defaults), 2)
    self.assertEqual(defaults['title'], 'AAA')
    self.assertEqual(defaults['description'], 'BBB')
def _read_from_sections(user, collection_url, permission):
    """Get regex sections.

    Walks the rights config sections; the first section whose 'user'
    regex matches ``user`` and whose 'collection' regex matches
    ``collection_url`` decides the outcome: True when ``permission``
    appears in that section's 'permission' value, else False.
    """
    filename = os.path.expanduser(config.get("rights", "file"))
    rights_type = config.get("rights", "type").lower()
    # Prevent "regex injection": the raw values are substituted into the
    # section patterns through ConfigParser interpolation, so escape them.
    user_escaped = re.escape(user)
    collection_url_escaped = re.escape(collection_url)
    regex = ConfigParser({
        "login": user_escaped,
        "path": collection_url_escaped
    })
    if rights_type in DEFINED_RIGHTS:
        # Built-in rights policy shipped with the application.
        log.LOGGER.debug("Rights type '%s'" % rights_type)
        regex.readfp(StringIO(DEFINED_RIGHTS[rights_type]))
    elif rights_type == "from_file":
        log.LOGGER.debug("Reading rights from file %s" % filename)
        if not regex.read(filename):
            log.LOGGER.error("File '%s' not found for rights" % filename)
            return False
    else:
        log.LOGGER.error("Unknown rights type '%s'" % rights_type)
        return False
    for section in regex.sections():
        re_user = regex.get(section, "user")
        re_collection = regex.get(section, "collection")
        log.LOGGER.debug(
            "Test if '%s:%s' matches against '%s:%s' from section '%s'" % (
                user, collection_url, re_user, re_collection, section))
        user_match = re.match(re_user, user)
        if user_match:
            # The collection pattern may reference groups captured by the
            # user pattern (e.g. per-user collections).
            re_collection = re_collection.format(*user_match.groups())
            if re.match(re_collection, collection_url):
                log.LOGGER.debug("Section '%s' matches" % section)
                return permission in regex.get(section, "permission")
            else:
                log.LOGGER.debug("Section '%s' does not match" % section)
    # No section granted access.
    return False
def reset(self, name):
    """Reset a single option -- or all of them when name == "all" -- in
    the custom configuration file.  Returns True on success.

    Raises GconfNotConfigurable for read-only setups or options that are
    not user-configurable.
    """
    # Without a custom conf file every config is read only.
    if self.custom_conf_file is None:
        raise GconfNotConfigurable()
    # Refuse to touch options that may not be modified.
    if name != "all" and not self._is_configurable(name):
        raise GconfNotConfigurable()
    cnf = ConfigParser()
    with open(self.custom_conf_file) as conf_in:
        cnf.readfp(conf_in)
    if name == "all":
        # Drop the whole [vars] section (leaving any other sections in
        # the file untouched) and recreate it empty.
        try:
            cnf.remove_section("vars")
        except NoSectionError:
            pass
        cnf.add_section("vars")
    else:
        # Never customised: nothing to reset.
        if not cnf.has_option("vars", name):
            return True
        cnf.remove_option("vars", name)
    # Write to a temp file and rename for an atomic replacement.
    with open(self.tmp_conf_file, "w") as conf_out:
        cnf.write(conf_out)
    os.rename(self.tmp_conf_file, self.custom_conf_file)
    self.reload()
    return True
class Config(object):
    """Thin wrapper around ConfigParser that also tracks a process-wide
    "current config" instance and exposes sections as attributes.
    """

    LIVE_CONFIG_PATH = os.path.join(project_base_dir, 'conf.cfg')
    TEST_CONFIG_PATH = os.path.join(project_base_dir, 'tests/conf.cfg')

    def __init__(self, path, auto_write):
        # Parse the file eagerly and register this object as the current
        # configuration.
        self._parser = ConfigParser()
        self._parser.readfp(open(path))
        self._path = path
        self._auto_write = auto_write
        self.set_current_config(self)

    def __getattr__(self, attr_name):
        """Delegate to the parser when it has the attribute; otherwise
        treat the name as a configuration section."""
        parser = self._parser
        if hasattr(parser, attr_name):
            return getattr(parser, attr_name)
        return ConfigSection(attr_name, parser, self._path, self._auto_write)

    def all(self):
        """Return the whole configuration as a nested dict
        {section: {option: value}}."""
        parser = self._parser
        return dict(
            (section, dict(parser.items(section)))
            for section in parser.sections())

    @classmethod
    def get_current_config(cls):
        # None until the first Config has been constructed.
        return getattr(cls, "config", None)

    @classmethod
    def set_current_config(cls, config):
        cls.config = config
def parse(self, firstRun=False):
    """(Re)load the configuration file when its content has changed.

    An md5 digest of the file is kept in self.md5 so unchanged files are
    not re-parsed.  On the first run any error is fatal; afterwards the
    error is logged and the previous configuration stays in effect.
    """
    try:
        FOLDER = abspath(dirname(__file__))
        fp = open(join(FOLDER, '..', self.filename))
        try:
            md5 = hashlib.md5(fp.read()).hexdigest()
            if self.md5 == md5:
                # Content unchanged since the last successful parse.
                return
            # Remember the digest we are about to parse.  (Previously it
            # was only set on the very first call, so after one change
            # every subsequent call re-parsed an unchanged file.)
            self.md5 = md5
            # Bug fix: the file used to be closed *before* seek/readfp,
            # so every reload raised on a closed file and was silently
            # logged away.  Rewind and parse first, close afterwards.
            fp.seek(0)
            cfg = ConfigParser()
            cfg.readfp(fp)
        finally:
            fp.close()
    except Exception as e:
        if firstRun:
            raise e
        else:
            log.msg("ERROR: Could not reload configuration: %s" % e)
            return
def parse_audit_files():
    """Load mmlinear workflow audit files into an in-memory sqlite table.

    Returns a ``dataset`` table with one row per audit section; rows for
    train_lin instances additionally get training_size / test_size / cost
    parsed out of the instance name.
    """
    audit_paths = glob.glob('audit/workflow_mmlinear_started_20151026_17*')
    audit_data = []
    for path in audit_paths:
        with open(path) as fh:
            cp = ConfigParser()
            cp.readfp(fh)
            for sec in cp.sections():
                dat = dict(cp.items(sec))
                if 'train_lin' in dat['instance_name']:
                    # Names look like train_lin_trn<N|rest>_tst<N>_c<float>
                    ms = re.match('train_lin_trn([0-9]+|rest)_tst([0-9]+)_c([0-9\.]+)',
                                  dat['instance_name'])
                    if ms is None:
                        raise Exception('No match in name: ' + dat['instance_name'])
                    m = ms.groups()
                    dat['training_size'] = m[0]
                    dat['test_size'] = m[1]
                    dat['cost'] = m[2]
                # NOTE(review): every section is appended, not only the
                # train_lin ones -- confirm that is intended.
                audit_data.append(dat)
    db = dataset.connect('sqlite:///:memory:')
    tbl = db['audit']
    for d in audit_data:
        tbl.insert(d)
    return tbl
def initStates():
    """Initialise the global tab visibility table from the main config.

    Every tab starts visible (1); tab groups whose config value is not
    exactly 'ON' are switched off (0).
    """
    # Read configuration file
    config = ConfigParser()
    config.readfp(openFile(FULL_MAIN_CONF))
    pds_flag = config.get('PDS', 'tab')
    px_flag = config.get('PX', 'tab')
    gm_flag = config.get('GM', 'tab')
    admin_flag = config.get('ADMIN', 'tab')
    # Every tab defaults to visible.
    for tab in tabs:
        states[tab] = 1
    # Disable any tab group not explicitly switched 'ON' in the config.
    if pds_flag != 'ON':
        states['pdsClients'] = states['pdsSources'] = 0
    if px_flag != 'ON':
        states['pxCircuits'] = 0
    if gm_flag != 'ON':
        states['generalMonitoring'] = 0
    if admin_flag != 'ON':
        states['admin'] = 0
def parse(cls, s):
    """Parse a ScribusScript descriptor string and return a cls instance.

    Raises EmptyDescriptor when the string holds no options, and
    ValidationError for missing required or unknown options.
    """
    cfg = ConfigParser()
    # Prepend the implicit section header so plain key=value text parses.
    cfg.readfp(StringIO("[ScribusScript]\n" + s))
    options = cfg.options("ScribusScript")
    if not len(options):
        raise EmptyDescriptor
    data = {}
    for item in cls.items:
        if item.name not in options:
            if item.required:
                raise ValidationError("Option %r required but not set" % item.name)
            continue
        # Consume the option; whatever is left at the end is unknown.
        options.remove(item.name)
        data[item.name] = item(cfg.get("ScribusScript", item.name))
    if options:
        raise ValidationError("Invalid options found: %s" % ", ".join(options))
    return cls(**data)
class ReviewConfig(object):
    """
    Provides a domain level API to a repositories
    .lintrc file. Allows reading tool names and tool configuration
    """

    def __init__(self, lintrc):
        # lintrc is the raw ini-format text of the repository's .lintrc.
        self._config = ConfigParser()
        self._config.readfp(StringIO(lintrc))

    def linters(self):
        """Return the configured linter names, or [] when unset."""
        try:
            values = self._config.get('tools', 'linters')
        except (NoSectionError, NoOptionError):
            # Narrowed from a bare except: only a missing [tools]/linters
            # entry means "no linters configured"; other errors propagate.
            return []
        # List comprehension instead of map() so the py3 behaviour stays a
        # list as well (identical under py2).
        return [name.strip() for name in values.split(',')]

    def linter_config(self, tool):
        """Return the [tool_<tool>] options as a dict, or [] when the
        tool has no configuration section (kept for interface compat)."""
        tool_name = 'tool_' + tool
        try:
            return dict(self._config.items(tool_name))
        except NoSectionError:
            return []
def load_config(main_section):
    """Locate, parse and validate the bugwarrior rc file.

    Search order: XDG config dir, then the legacy ~/.bugwarriorrc; when
    neither exists a fresh path inside the XDG config dir is used.
    """
    config = ConfigParser({'log.level': "DEBUG", 'log.file': None})
    xdg_dir = BaseDirectory.load_first_config('bugwarrior')
    rc_path = os.path.join(xdg_dir, 'bugwarriorrc') if xdg_dir is not None else None
    legacy_path = os.path.expanduser("~/.bugwarriorrc")
    if rc_path is None or not os.path.exists(rc_path):
        if os.path.exists(legacy_path):
            rc_path = legacy_path
        else:
            rc_path = os.path.join(
                BaseDirectory.save_config_path('bugwarrior'), 'bugwarriorrc')
    # The rc file is treated as UTF-8 text.
    config.readfp(
        codecs.open(
            rc_path,
            "r",
            "utf-8",
        )
    )
    config.interactive = False  # TODO: make this a command-line option
    validate_config(config, main_section)
    return config
class GSOParameters(object):
    """Represents the set of the variables of the algorithm"""

    def __init__(self, file_name=None):
        # Read glowworm.conf either from the explicit file_name or from
        # the directory named by the LIGHTDOCK_CONF_PATH environment
        # variable.
        self._config = ConfigParser()
        try:
            if file_name:
                self._config.readfp(open(file_name))
            else:
                self._config.readfp(open("%s%s" % (os.environ['LIGHTDOCK_CONF_PATH'],
                                                   'glowworm.conf')))
        except Exception, e:
            # Missing file or unset environment variable ends up here.
            raise GSOParameteresError(str(e))

        try:
            # GSO (glowworm swarm optimization) algorithm constants read
            # from the [GSO] section.
            self.rho = float(self._config.get('GSO', 'rho'))
            self.gamma = float(self._config.get('GSO', 'gamma'))
            self.beta = float(self._config.get('GSO', 'beta'))
            self.initial_luciferin = float(self._config.get('GSO', 'initialLuciferin'))
            self.initial_vision_range = float(self._config.get('GSO', 'initialVisionRange'))
            self.max_vision_range = float(self._config.get('GSO', 'maximumVisionRange'))
            self.max_neighbors = int(self._config.get('GSO', 'maximumNeighbors'))
        except Exception, e:
            # Any missing/non-numeric option is reported as a parse error.
            raise GSOParameteresError("Problem parsing GSO parameters file. Details: %s" % str(e))
def _read_ini_file(self, root, ini_file_path):
    """Read one group INI file and register its menu entry.

    Builds a QMenu for the group described by the file (id, alias and
    icon), records a locale translation for the alias when present, and
    logs a QGIS error message if the file cannot be parsed.
    """
    try:
        parser = ConfigParser()
        # Group files are UTF-8 encoded.
        ini_file = codecs.open(os.path.join(root, ini_file_path), 'r', 'utf-8')
        parser.readfp(ini_file)
        # Read the mandatory config values.
        group_id = parser.get('general', 'id')
        group_alias = parser.get('ui', 'alias')
        group_icon_path = os.path.join(root, parser.get('ui', 'icon'))
        # Look for an alias translation matching the current locale,
        # e.g. an "alias[de]" key in the [ui] section.
        posible_trans = parser.items('ui')
        for key, val in posible_trans:
            if type(key) is unicode and key == 'alias[%s]' % self.locale:
                self.translator.append(group_alias, val)
                break
        # Register the group's menu and icon.
        self.groups[group_id] = QMenu(self.tr(group_alias))
        self.groups[group_id].setIcon(QIcon(group_icon_path))
    except Exception, e:
        error_message = 'Group INI file can\'t be parsed: ' + e.message
        QgsMessageLog.logMessage(error_message, level=QgsMessageLog.CRITICAL)
def _getConfig(configFile):
    '''Get the configuration.

    This populates a configuration with default values and then
    overwrites them with the configFile, which may be None (in which
    case, no overwriting happens).
    '''
    parser = ConfigParser()
    # Factory defaults.
    parser.add_section('factories')
    for key, val in _defaultFactories.iteritems():
        parser.set('factories', key, val)
    # Index defaults.
    parser.add_section('index')
    parser.set('index', 'path', 'index')
    parser.set('index', 'pageSize', '20')
    # Policy defaults, anchored at the current working directory
    # (normalised to forward slashes).
    current = '/'.join(os.path.split(os.getcwd()))
    parser.add_section('policies')
    parser.set('policies', 'repo', 'file:%s/policy' % current)
    parser.set('policies', 'validation', 'file:%s/policy' % current)
    parser.set('policies', 'user', '%s/user.db' % current)
    # Any user-supplied file overrides the defaults above.
    if configFile is not None:
        parser.readfp(file(configFile))
    return parser
def _makeInstance(self, id, portal_type, subdir, import_context):
    """Construct a content object of ``portal_type`` named ``id`` and
    apply its .properties data; returns the object or None when the
    type cannot be constructed here.
    """
    context = self.context
    subdir = '%s/%s' % (subdir, id)
    properties = import_context.readDataFile('.properties', subdir)
    tool = getToolByName(context, 'portal_types')
    try:
        tool.constructContent(portal_type, context, id)
    except ValueError:
        # invalid type
        return None
    content = context._getOb(id)
    if properties is None:
        return content
    if '[DEFAULT]' not in properties:
        # Non-ini payloads go through the DAV-aware importer first.
        try:
            FolderishDAVAwareFileAdapter(content).import_(import_context, subdir)
            return content
        except (AttributeError, MethodNotAllowed):
            # Fall through to the ConfigParser implementation below.
            pass
    # Normalise line endings and read title/description from the
    # [DEFAULT] section, with fallbacks for missing options.
    stream = StringIO('\n'.join(properties.splitlines()))
    parser = ConfigParser(defaults={'title': '', 'description': 'NONE'})
    parser.readfp(stream)
    content.setTitle(parser.get('DEFAULT', 'title'))
    content.setDescription(parser.get('DEFAULT', 'description'))
    return content
def CheckForUpdates():
    """Checks for updates of decoder.py on the official page and mirror.

    If you are not connected to the internet or both servers are
    unavailable or the downloaded update script is corrupted, None is
    returned.  If everything went fine, the dictionary of update script
    options, with their values, is returned.

    To check for new version do:
    >>> nv = decoder.CheckForUpdates()
    >>> if nv:
    >>>     if nv["__version__"] <= decoder.__version__:
    >>>         print "No new version available!"
    >>>     else:
    >>>         print "A new version is", nv["__version__"]
    >>> else:
    >>>     print "Something wrong with net connection, DNS, servers, uscripts etc."
    """
    from urllib2 import urlopen, Request
    from sys import platform
    from ConfigParser import ConfigParser
    # The first URL is a script that counts update checkers
    # It increases the counter and returns the file stored on second URL
    # Nothing to worry about, anyway. Just statistics.
    urls = ("http://www.brailleweb.com/cgi-bin/uscript.py",
            "http://www.brailleweb.com/projects/decoder.py",
            "http://brailleweb.webhop.net/decoder.py/updates.pdc")
    for url in urls:
        try:
            request = Request(url)
            request.add_header("User-Agent",
                               "decoder.py/" + __version__ + " on " + platform)
            cp = ConfigParser()
            cp.readfp(urlopen(request))
            # Flatten the [decoder.py] section into a plain dict.
            return dict(cp.items("decoder.py"))
        except:
            # Deliberately best-effort: any failure (network, DNS, parse)
            # just moves on to the next mirror; None when all fail.
            pass
def onLoadConfig(self, inifile):
    """Parse a file-like object for the loaded l10n.ini file."""
    cp = ConfigParser(self.defaults)
    cp.readfp(inifile)
    self.baseurl = urljoin(self.inipath, self.getDepth(cp))
    # create child loaders for any other l10n.ini files to be included
    try:
        for title, path in cp.items('includes'):
            if title in self.defaults:
                # skip default items
                continue
            # add child config parser
            self.addChild(title, path, cp)
    except NoSectionError:
        pass
    # try to load the "dirs" defined in the "compare" section
    try:
        self.dirs.extend(cp.get('compare', 'dirs').split())
    except (NoOptionError, NoSectionError):
        pass
    # try getting a top level compare dir, as used for fennec
    try:
        self.tld = cp.get('compare', 'tld')
        # remove tld from comparison dirs
        if self.tld in self.dirs:
            self.dirs.remove(self.tld)
    except (NoOptionError, NoSectionError):
        self.tld = None
    # try to set "all_path" and "all_url"
    try:
        self.all_path = cp.get('general', 'all')
        self.all_url = urljoin(self.baseurl, self.all_path)
    except (NoOptionError, NoSectionError):
        self.all_path = None
        self.all_url = None
    return cp
def executable_check(rootdir, outputdir, input_path, verbose=False):
    """Run the ltp_test binary on input_path as a dynamic sanity check.

    Generates a temporary ltp.conf with all relative model paths made
    absolute under rootdir, then runs the 'srl' pipeline, capturing
    stdout into outputdir/output.txt.  Returns False when ltp_test is
    not executable, True after the run completes.
    """
    ltp_test_exe = os.path.join(rootdir, "bin/ltp_test")
    if not which(ltp_test_exe):
        logging.error("ltp_test: ltp_test is not executable.")
        logging.info("ltp_test: all dynamic checks are skipped.")
        return False
    # Parse the shipped config; the synthetic [root] header makes the
    # plain key=value file digestible for ConfigParser.
    original_config_path = os.path.join(os.path.join(rootdir, "conf"), "ltp.cnf")
    cfg_str = '[root]\n' + open(original_config_path, "r").read()
    cfg = ConfigParser()
    cfg.readfp(StringIO.StringIO(cfg_str))
    config_path = "/tmp/ltp.autotest.ltp.conf"
    cofs = open(config_path, "w")
    print >> cofs, "target = all"
    # Re-emit each relative model path as an absolute path under rootdir.
    # (Entries that are already absolute are intentionally not copied,
    # matching the original behaviour.)
    for option in ("segmentor-model", "postagger-model", "parser-model",
                   "ner-model", "srl-data"):
        model = cfg.get("root", option)
        if not model.startswith("/"):
            print >> cofs, ("%s = %s" % (option, os.path.join(rootdir, model)))
    cofs.close()
    command = [ltp_test_exe, config_path, "srl", input_path]
    logging.info("ltp_test: dynamically executable check is running.")
    ofs = open(os.path.join(outputdir, "output.txt"), "w")
    subprocess.call(command, stdout=ofs, stderr=DUMMY)
    ofs.close()
    logging.info("ltp_test: dynamically executable check is done.")
    return True
def testConfig(self, goodconfig, newline, passes=0): if newline: testconfig = goodconfig + newline + "\r\n" # Write out to a StringIO object newfile = StringIO(testconfig) try: testparser = ConfigParser() testparser.readfp(newfile) # Line looks ok, add it to the config file return testconfig except MissingSectionHeaderError: if passes > 0: # Something is odd here... just return the version that works return goodconfig else: return self.testConfig(goodconfig + "[" + self.section + "]\n", newline, passes=1) except ParsingError: # Ignore the line, don't add it to the config file return goodconfig
def dashboard_at(api, filename, datetime=None, revision=None):
    """Load a dashboard file at a revision (or the revision current at
    ``datetime``) and return it parsed according to its type.

    ignored_requests -> YAML dict, config -> dict of the [remote]
    pseudo-section, anything else -> raw content.
    """
    if datetime:
        revision = revision_at(api, datetime)
        if not revision:
            return revision
    content = api.dashboard_content_load(filename, revision)
    # Bug fix: ('name') is just a parenthesised string, so ``in`` did a
    # substring test (e.g. filename == '' matched everything); use real
    # one-element tuples for membership.
    if filename in ('ignored_requests',):
        if content:
            return yaml.safe_load(content)
        return {}
    elif filename in ('config',):
        if content:
            # TODO re-use from osclib.conf.
            from ConfigParser import ConfigParser
            import io
            cp = ConfigParser()
            config = '[remote]\n' + content
            cp.readfp(io.BytesIO(config))
            return dict(cp.items('remote'))
        return {}
    return content
def begin(client=connect_name):
    """ Returns the krpc connection object """
    cf = ConfigParser()
    # The config file is mandatory; bail out of the process when it is
    # missing or unreadable.
    try:
        cf.readfp(open(cfg_file))
    except Exception:
        print("Error opening config file")
        sys.exit(1)
    # Server address and port come from the [krpc] section; the stream
    # port is conventionally rpc port + 1.
    try:
        server = cf.get("krpc", "server")
        port = int(cf.get("krpc", "port"))
    except Exception:
        print("Error reading the server from " + cfg_file + ". Is the format correct?")
        sys.exit(1)
    return krpc.connect(client + " " + str(version),
                        address=server,
                        rpc_port=port,
                        stream_port=port + 1)