def parse(self, text):
    """Parse the given configuration.

    Asserts that all required keys and no invalid keys were specified
    in the configuration.

    :param text: configuration to parse
    :type text: string or open file
    :raises ParsingError: if invalid keys are present or required keys
        are missing in the [tractor] section
    """
    # Accept either a raw string or an open file; wrap strings so the
    # parser always sees a file-like object.
    if not hasattr(text, 'readline'):
        text = StringIO(text)
    ini_parser = SafeConfigParser()
    ini_parser.readfp(text)
    # BUG FIX: ConfigParser never includes DEFAULT in sections(), so the
    # old check `'DEFAULT' in ini_parser.sections()` was always false and
    # global settings were silently dropped. defaults() returns the
    # DEFAULT section's items directly.
    if ini_parser.defaults():
        self.global_settings = dict(ini_parser.defaults())
    if 'tractor' in ini_parser.sections():
        found_req_keys = []
        invalid_keys = []
        # NOTE(review): items() also yields keys inherited from DEFAULT;
        # they are validated against self.KEYS like any other key.
        for key, value in ini_parser.items('tractor'):
            if key not in self.KEYS:
                invalid_keys.append(key)
                continue
            if key in self.REQUIRED_KEYS:
                found_req_keys.append(key)
            self.settings[key] = value
        if invalid_keys:
            raise ParsingError('Found invalid keys in [tractor] '
                               'config section: %s' % (invalid_keys,))
        diff = self.REQUIRED_KEYS.difference(found_req_keys)
        if diff:
            raise ParsingError('Not all required keys found in [tractor] '
                               'config section (found: %s, missing: %s).'
                               % (found_req_keys, list(diff)))
def _loadConfig(configFile):
    """Read configFile and return BGP, dataplane-driver and API settings.

    Exits the process if the file cannot be read; logs (but does not
    abort) on missing dataplane sections.
    """
    parser = SafeConfigParser()
    if len(parser.read(configFile)) == 0:
        logging.error("Configuration file not found (%s)", configFile)
        exit()

    bgpConfig = parser.items("BGP")

    dataplaneConfig = dict()
    for vpnType in ['ipvpn', 'evpn']:
        sectionName = "DATAPLANE_DRIVER_%s" % vpnType.upper()
        try:
            dataplaneConfig[vpnType] = dict(parser.items(sectionName))
        except NoSectionError:
            if vpnType == "ipvpn":
                # backward compat for ipvpn: fall back to the legacy
                # section name and warn.
                dataplaneConfig['ipvpn'] = dict(
                    parser.items("DATAPLANE_DRIVER"))
                logging.warning("Config file is obsolete, should have a "
                                "DATAPLANE_DRIVER_IPVPN section instead of"
                                " DATAPLANE_DRIVER")
            else:
                logging.error(
                    "Config file should have a DATAPLANE_DRIVER_EVPN section")

    apiConfig = parser.items("API")
    # TODO: add a default API config

    return {"bgpConfig": dict(bgpConfig),
            "dataplaneConfig": dataplaneConfig,
            "apiConfig": dict(apiConfig)}
def createS3TestsUsers(rgw_node, rgw_name):
    """Create the s3-tests 'main' and 'alt' users on a radosgw node.

    Reads user details from s3tests/s3-tests.conf and runs
    'radosgw-admin user create' over ssh on rgw_node for each user.

    :param rgw_node: hostname to ssh into
    :param rgw_name: radosgw instance name, used to build the cephx client key
    :raises Exception: if either remote user-create command exits non-zero
    """
    config = SafeConfigParser()
    config.read('s3tests/s3-tests.conf')
    # NOTE(review): the [3:] slice drops the first three items of the
    # section -- presumably DEFAULT-inherited entries; confirm against
    # the layout of s3-tests.conf.
    s3main_list = config.items('s3 main')[3:]
    s3main_data = {}
    s3main_data['client_key'] = "client.radosgw.%s" % (rgw_name)
    for i in range (len(s3main_list)):
        s3main_data[s3main_list[i][0]] = s3main_list[i][1]
    # Build the remote user-create command from the config values.
    s3main_usercreate_cmd = 'sudo radosgw-admin -n {client_key} user create --uid={user_id} --display-name=\"{display_name}\" \
--email={email} --access_key={access_key} --secret={secret_key} --key-type s3'.format(**s3main_data)
    cmd = "ssh %s %s" % (rgw_node, s3main_usercreate_cmd)
    rc,stdout,stderr = launch(cmd=cmd)
    if rc != 0:
        raise Exception, "Error while creating s3 main user'. Error message: '%s'" % (stderr)
    # Same flow for the secondary ('alt') test user.
    s3alt_list = config.items('s3 alt')[3:]
    s3alt_data = {}
    s3alt_data['client_key'] = "client.radosgw.%s" % (rgw_name)
    for i in range (len(s3alt_list)):
        s3alt_data[s3alt_list[i][0]] = s3alt_list[i][1]
    s3alt_usercreate_cmd = 'sudo radosgw-admin -n {client_key} user create --uid={user_id} --display-name=\"{display_name}\" \
--email={email} --access_key={access_key} --secret={secret_key} --key-type s3'.format(**s3alt_data)
    cmd = "ssh %s %s" % (rgw_node, s3alt_usercreate_cmd)
    rc,stdout,stderr = launch(cmd=cmd)
    if rc != 0:
        raise Exception, "Error while creating s3 alt user'. Error message: '%s'" % (stderr)
def parse_config(filename, dirs=None):
    """Locate and parse an ini-style package description file.

    :param filename: config file name
    :param dirs: optional list of directories in which to look for filename
    :returns: tuple (meta, vars, sections, requires)
    :raises PkgNotFound: if none of the candidate files could be read
    """
    if dirs:
        filenames = [os.path.join(d, filename) for d in dirs]
    else:
        filenames = [filename]

    config = SafeConfigParser()
    # read() returns the list of files successfully parsed; an empty
    # list means nothing was found.
    if not config.read(filenames):
        raise PkgNotFound("Could not find file(s) %s" % str(filenames))

    # Parse meta and variables sections
    meta = parse_meta(config)

    vars = {}
    if config.has_section('variables'):
        for name, value in config.items("variables"):
            vars[name] = _escape_backslash(value)

    # Parse "normal" sections (everything except meta/variables).
    secs = [s for s in config.sections() if s not in ['meta', 'variables']]
    sections = {}
    requires = {}
    for s in secs:
        d = {}
        # 'requires' is pulled out separately; it still appears in d too.
        if config.has_option(s, "requires"):
            requires[s] = config.get(s, 'requires')
        for name, value in config.items(s):
            d[name] = value
        sections[s] = d

    return meta, vars, sections, requires
class JsonConfigParser(BaseConfigParser):
    """Config parser reading repo/remote data from polygamy.json and
    user preferences from preferences.ini."""

    CONFIG_FILE = 'polygamy.json'

    def parse_file(self):
        """Load repositories/remotes from JSON and preferences from ini."""
        with open(self.config_path) as config_file:
            data = json.loads(config_file.read())
        self.repositories = data['repos']
        self.remotes = data['remotes']

        prefs = ConfigParser()
        # Ensure both sections exist even when the file is absent/partial.
        prefs.add_section('groups')
        prefs.add_section('git')
        prefs.read(os.path.join(self.config_dir, 'preferences.ini'))
        self.preference_config = prefs

        self.enabled_groups = {name for name, _value in prefs.items('groups')}
        self.git_config = prefs.items('git')

    def save_preferences(self):
        """Persist the currently enabled groups back to preferences.ini."""
        prefs = self.preference_config
        # Rebuild the section from scratch so disabled groups disappear.
        prefs.remove_section('groups')
        prefs.add_section('groups')
        for group in self.enabled_groups:
            prefs.set('groups', group, '')
        prefs_path = os.path.join(self.config_dir, 'preferences.ini')
        with open(prefs_path, 'w') as f:
            prefs.write(f)
def configure(config_file):
    """Load the mnemos publisher configuration.

    Every key of the [mnemos] section is injected into module globals;
    the sections named by the resulting 'cloud_name' and 'db_name'
    globals are saved for the cloud and database plugins. Also creates
    and chowns the log file.

    :param config_file: path to the ini file; exits on missing/unreadable
    """
    if not config_file:
        print "==> ERROR: configuration file not specified!!!"
        sys.exit(1)
    else:
        print "==> Reading configuration file..."
    cf_parser = SafeConfigParser()
    if len(cf_parser.read(config_file)) == 0:
        print ('Cannot read configuration file: %s!' % config_file)
        sys.exit(1)
    # NOTE(review): writing arbitrary config keys straight into globals()
    # makes the config file effectively executable state; the code below
    # relies on 'logdir', 'cloud_name' and 'db_name' arriving this way.
    for key,val in cf_parser.items('mnemos'):
        globals() [key]=val
    # logfile
    global logfile
    logfile=''+logdir+'/mnemos_publisher.log'
    # touch logfile
    # NOTE(review): paths are interpolated unquoted into shell commands;
    # a value with shell metacharacters would be interpreted by the shell.
    if not os.path.exists(os.path.dirname(logfile)):
        os.system('sudo mkdir -p '+os.path.dirname(logfile)+'')
    os.system('sudo touch '+logfile+'')
    os.system('sudo chown mnemos:mnemos '+logfile+'')
    # config cloud plugin
    global cloud_cf
    cloud_cf = cf_parser.items(cloud_name)
    # config database plugin
    global db_cf
    db_cf = cf_parser.items(db_name)
def reload_config(cls, filename):
    """(Re)load remote/local database settings and nova credentials.

    Exits the process if the file or any mandatory section is missing;
    finally verifies the nova credentials.

    :param cls: class object holding the shared configuration state
    :param filename: path to the ini configuration file
    """
    cls.config_file = filename
    print "Loading configuration from " + filename
    if not os.path.isfile(filename):
        print "Config file not found - failing"
        sys.exit(1)
    # Reset previously loaded state before re-reading.
    cls.remote = None
    cls.local = None
    cls.nova = None
    parser = SafeConfigParser()
    parser.read(filename)
    if not parser.has_section('remote'):
        print "No remote database configuration - failing"
        sys.exit(1)
    if not parser.has_section('local'):
        print "No local database configuration - failing"
        sys.exit(1)
    # NOTE(review): cls.nova was set to None just above, so the second
    # clause is always true here; the condition reduces to the
    # has_section check.
    if not parser.has_section('nova') and cls.nova is None:
        print "No nova credentials provided - failing"
        sys.exit(1)
    cls.remote = {}
    cls.local = {}
    cls.nova = {}
    for (name, value) in parser.items('remote'):
        cls.remote[name] = value
    for (name, value) in parser.items('local'):
        cls.local[name] = value
    for (name, value) in parser.items('nova'):
        cls.nova[name] = value
    # Fail fast if the loaded credentials cannot authenticate.
    verify_nova_creds(cls.nova)
def _from_stream(cls, stream, section, filename=None): "helper for from_string / from_path" # Py2k # pos = stream.tell() # end Py2k # p = SafeConfigParser() if py32_lang: # Py3.2 deprecated readfp p.read_file(stream, filename or "<???>") else: p.readfp(stream, filename or "<???>") # Py2k # try: items = p.items(section) except InterpolationSyntaxError, err: if not _is_legacy_parse_error(err): raise #support for deprecated 1.4 behavior, will be removed in 1.6 if filename: warn("from_path(): the file %r contains an unescaped '%%', this will be fatal in passlib 1.6" % (filename,), stacklevel=3) else: warn("from_string(): the provided string contains an unescaped '%', this will be fatal in passlib 1.6", stacklevel=3) p = ConfigParser() stream.seek(pos) p.readfp(stream) items = p.items(section)
def __init__( self ):
    """Load ~/.pyjacksmrc and populate client, path and session settings."""
    config = SafeConfigParser( defaults )
    config.read( os.path.expanduser( "~/.pyjacksmrc" ) )
    self.jackname = config.get( "DEFAULT", "jackclientname" )
    # setup infra
    if config.has_section( "infra" ):
        self.infra_clients = dict( config.items( "infra" ) )
    else:
        # No [infra] section: fall back to the a2j midi daemon only.
        self.infra_clients = { "a2j": "a2jmidid" }
    # setup path
    if config.has_section( "path" ):
        self.path_map = dict( config.items( "path" ) )
    else:
        self.path_map = {}
    # implicit clients... should b conf too
    self.implicit_clients = [ "system" ]
    # Directories always end with a trailing slash.
    self.sessiondir = os.path.expanduser( config.get( "DEFAULT", "sessiondir" ) ) + "/"
    self.templatedir = os.path.expanduser( config.get( "DEFAULT", "templatedir" ) ) + "/"
def items(self, section, raw=False, vars=None):
    """
    Same as @c super.items(), but options from ancestors in the
    section tree are included in the result.
    """
    # Section names form a dot-separated tree: "a.b.c" inherits from
    # "a.b", which inherits from "a". Start with the root's options.
    nodes = section.split('.')
    config = dict(SafeConfigParser.items(self, nodes[0], raw, vars))
    path = nodes[0]
    # Now this is a pain. We have to clear the DEFAULT section, lest the
    # code below overwrites the options that have defaults. The reason is
    # that crappy ConfigParser does not have a method that enumerates only
    # the keys defined in particular section; the keys in the DEFAULT
    # section are always included.
    tmp_defaults = self._defaults
    self._defaults = {}
    for node in nodes[1:]:
        path = node if len(path) == 0 else path + '.' + node
        try:
            next_config = dict(SafeConfigParser.items(self, path, raw, vars))
            for key in self.options(path):
                # Overwrite only the keys defined in the child section
                config[key] = next_config[key]
        except ConfigParser.NoSectionError:
            # Missing intermediate sections are simply skipped.
            pass
    # Restore the defaults we temporarily cleared above.
    self._defaults = tmp_defaults
    return config.iteritems()
def _parse_config_files(self):
    """
    Parse the possible config files and set appropriate values
    default values
    """
    parser = SafeConfigParser(self.defaults)
    parser.read(self.config_files)
    self.config = dict(parser.defaults())
    # Options from the command-specific section override the defaults.
    if parser.has_section(self.command):
        self.config.update(dict(parser.items(self.command, raw=True)))
    # All sections declared in self.sections are mandatory.
    for section in self.sections:
        if parser.has_section(section):
            self.config.update(dict(parser.items(section, raw=True)))
        else:
            raise NoSectionError("Mandatory section [%s] does not exist." % section)
    # filter can be either a list or a string, always build a list:
    if self.config['filter']:
        if self.config['filter'].startswith('['):
            # NOTE(review): eval() of a config value executes arbitrary
            # code from the config file; consider ast.literal_eval here.
            self.config['filter'] = eval(self.config['filter'])
        else:
            self.config['filter'] = [ self.config['filter'] ]
    else:
        self.config['filter'] = []
def read(self, filepath):
    """Read the configuration file at filepath.

    @param filepath: full path to source file
    @type filepath: str
    @return: None
    """
    # DEBUG: separate SafeConfigParser() instance per section really
    # required (due to defaults)?
    # preferences
    cfg = SafeConfigParser(self.prefs)
    cfg.read(filepath)
    section = "preferences"
    self.baseDir = os.path.expanduser(cfg.get(section, "baseDir"))
    self.file_active = cfg.get(section, "file_active")
    self.file_archive = cfg.get(section, "file_archive")
    self.file_report = cfg.get(section, "file_report")
    self.useUTC = cfg.get(section, "useUTC")
    # colors
    cfg = SafeConfigParser(self.colors)
    cfg.read(filepath)
    for k, v in cfg.items("colors"):
        self.colors[k] = v
    # highlight colors
    cfg = SafeConfigParser(self.highlightColors)
    cfg.read(filepath)
    # BUG FIX: [highlights] values were previously written into
    # self.colors, clobbering the colors loaded just above; store them
    # in self.highlightColors, matching the defaults dict this parser
    # instance was seeded with.
    for k, v in cfg.items("highlights"):
        self.highlightColors[k] = v
def load_track(path, default_accessor="ArrayAccessor", **kwargs):
    """
    Factory function that returns a Track instance to access the data
    stored in <path>. It determines the correct ArrayAccessor to use by
    looking for the config file track.rc in the directory.
    """
    from ConfigParser import SafeConfigParser as ConfigParser, NoSectionError
    import os
    import byo.io.track_accessors as track_accessors

    # FASTA files get the genome accessor keyed by the file's basename.
    if path.endswith(('.fa', '.fna', '.fasta')):
        system, ext = os.path.splitext(os.path.basename(path))
        return Track(os.path.dirname(path), track_accessors.GenomeAccessor,
                     system=system, **kwargs)
    elif path.endswith(".bam") and os.path.isfile(path):
        return Track(path, track_accessors.BAMAccessor,
                     description="BAM('%s')" % os.path.basename(path),
                     **kwargs)

    track_dict = dict(accessor_type=default_accessor,
                      description=os.path.basename(path))
    try:
        cp = ConfigParser()
        cp.read(os.path.join(path, "track.rc"))
        track_dict.update(dict(cp.items("track")))
        kwargs.update(dict(cp.items("kwargs")))
        kwargs.update(track_dict)
    except (IOError, OSError, NoSectionError):
        # FIX of the former bare `except: pass` (the TODO above it asked
        # for this): only a missing/unreadable track.rc, or one without
        # the expected sections, silently falls back to the defaults;
        # other errors now surface instead of being hidden.
        pass

    accessor = getattr(track_accessors, track_dict["accessor_type"])
    return Track(path, accessor, **kwargs)
def process_script(s):
    """Translate an ini-style script into a command-line argument list.

    The [args] section becomes plain --key value pairs; every other
    section becomes a region that is deleted, re-created, and configured.

    :param s: path to the ini script file
    :returns: list of argv-style strings
    """
    out = ['--allowmissing']
    cfg = SafeConfigParser()
    # FIX: the old `cfg.readfp(file(s))` left the file handle open;
    # the context manager guarantees it is closed.
    with open(s) as fp:
        cfg.readfp(fp)
    for k, v in cfg.items('args'):
        out.append('--%s' % k)
        out.append(v)
    for rname in cfg.sections():
        if rname == 'args':
            continue
        # Recreate the region from scratch before applying its options.
        out.append('--delregion')
        out.append(rname)
        out.append('--newregion')
        out.append(rname)
        for k, v in cfg.items(rname):
            if k == 'tiles':
                k = 'setregiontiles'
            out.append('--%s' % k)
            out.append('%s=%s' % (rname, v))
    return out
def parseConfig(f):
    """Use configparser module to parse configuration file

    config file must be in right format, otherwise, server cannot start

    This function returns 2 dictionaries:
    - handle_servers stores requested server, handles server and weight
    - constructed_cluster store requested server and handle server

    This thing is ugly but it works :-)
    """
    print 'Reading configuration file....'
    cluster = {}
    group = {}
    constructed_cluster = {}
    parser = SafeConfigParser()
    parser.read(f)
    # [farms] lists the farm sections; each farm section describes one
    # cluster of servers.
    if parser.has_section('farms'):
        farm_list = [value for name, value in parser.items('farms')]
        for item in farm_list[0].split(','):
            if parser.has_section(item):
                cluster[item] = {name:value for name, value in parser.items(item)}
    else:
        sys.stderr.write("Configuration file error, no item 'farms' defined\n")
        sys.stderr.write("Exit!\n")
        sys.exit(2)
    # Map each farm's own domain to the domains of the servers it
    # redirects to (farm option 'list' names the member sections).
    for i in parser.get('farms','list').split(','):
        list_redirect_domain = [parser.get(parser.get(i, 'list').split(',')[j], 'domain')
                                for j in range(len(parser.get(i, 'list').split(','))) ]
        constructed_cluster[parser.get(i, 'domain')] = list_redirect_domain
    # Build per-domain server groups, skipping values equal to 'on' --
    # presumably flag options rather than addresses; TODO confirm.
    for server_group, server_list in cluster.iteritems():
        temporary_list = []
        origin_domain_name = server_list['domain']
        servers = [server for server in server_list['list'].split(',')]
        for s in servers:
            temporary_list.append([v for k, v in parser.items(s) if v != 'on'])
        group[origin_domain_name] = temporary_list
    return (group, constructed_cluster)
class ConfigLoader(object):
    """Loads repo/user configuration from an ini file with [system],
    [repo:<name>] and [user:<name>] sections."""

    def __init__(self, cfg_file):
        self.config = SafeConfigParser()
        with open(cfg_file, 'r') as config_file:
            self.config.readfp(config_file)
        self.login_script = self.config.get('system', 'login_script')
        self.repo_path = self.config.get('system', 'repo_path')
        self.repo_user = self.config.get('system', 'repo_user')
        self.lock_script = self.config.get('system', 'lock_script')

    def _filtered_sections(self, section_name):
        """Yield section names of the form '<section_name>:<suffix>'."""
        for section in self.config.sections():
            if section.startswith(section_name + ":"):
                yield section

    @staticmethod
    def as_bool(value):
        """Interpret a config string as a boolean ('yes'/'true'/'on')."""
        return value.lower() in ('yes', 'true', 'on')

    @staticmethod
    def as_list(value):
        """Split a comma-separated config string into stripped items.

        BUG FIX: the comprehension's filter previously tested the whole
        string (`if value`), so inputs like 'a,,b' produced empty list
        elements; each item is now tested individually and blank entries
        are dropped. An empty/blank input still yields [].
        """
        return [item.strip() for item in value.split(",") if item.strip()]

    @staticmethod
    def clean_section_name(name):
        """Return the suffix of a 'prefix:suffix' section name."""
        return name.split(':')[1]

    @property
    def repos(self):
        """Yield Repository objects built from the [repo:*] sections."""
        for section in self._filtered_sections('repo'):
            values = dict(self.config.items(section))
            values['path'] = self.clean_section_name(section)
            yield Repository.from_config(values, self.repo_path,
                                         self.lock_script)

    @property
    def users(self):
        """Yield User objects built from the [user:*] sections."""
        for section in self._filtered_sections('user'):
            values = dict(self.config.items(section))
            values['username'] = self.clean_section_name(section)
            yield User.from_config(values, self.login_script)

    @property
    def user_dict(self):
        """Map username -> User for all configured users."""
        return dict((user.username, user) for user in self.users)

    @property
    def repo_user_authorized_keys(self):
        """Path to the repo user's authorized_keys file.

        :raises ValueError: if the repo user has no home directory
            (expanduser leaves the '~' in place in that case).
        """
        home_dir = os.path.expanduser('~' + self.repo_user)
        if '~' in home_dir:
            raise ValueError("User {0!r} doesn't exist".format(self.repo_user))
        return os.path.join(home_dir, '.ssh', 'authorized_keys')
def get_config(config_file='seq2seq.ini'):
    """Read seq2seq settings, coercing each section to its named type.

    The [ints], [floats] and [strings] sections are merged into a single
    flat dict with values converted to int, float and str respectively.
    """
    parser = SafeConfigParser()
    parser.read(config_file)
    conf = {}
    # Each section name doubles as the type of its values.
    for section, cast in (('ints', int), ('floats', float), ('strings', str)):
        for key, value in parser.items(section):
            conf[key] = cast(value)
    return conf
def test_command_config_with_invalid_smtp(self):
    """The written config's [Mail] section must match the default scheme."""
    config_changed = self.mm_config_fill()
    expected_items = self.get_default_config_scheme()
    config = SafeConfigParser()
    config.read(self.config_path)
    assert config.has_section('Mail')
    mail_items = config.items('Mail')
    assert mail_items == expected_items, \
        "\n[%s]\n[%s]" % (mail_items, expected_items)
def get_setup():
    """Read setup.cfg and return the (database, options, creds) dicts."""
    parser = SafeConfigParser()
    parser.read(['setup.cfg'])
    database, options, creds = (
        dict(parser.items(name))
        for name in ('database', 'options', 'twitter_credentials'))
    return database, options, creds
def _parse_config(self, cnf_file):
    """Load the [db] and [svn] settings from cnf_file.

    :raises NoConfigError: if cnf_file does not exist
    """
    if not os.path.isfile(cnf_file):
        raise NoConfigError('Filename %s not found' % cnf_file)
    parser = SafeConfigParser()
    parser.read(cnf_file)
    self.db_cnf = dict(parser.items("db"))
    self.svn_cnf = dict(parser.items("svn"))
def _get_file(self, config):
    """ Read a per-user .ini file, which is expected to have either a
    ``[scrapekit]`` or a ``[$SCRAPER_NAME]`` section. """
    config_file = SafeConfigParser()
    # read() silently ignores a missing file, so no config file is fine.
    config_file.read([os.path.expanduser('~/.scrapekit.ini')])
    # Generic section first, then scraper-specific values override it.
    if config_file.has_section('scrapekit'):
        config.update(dict(config_file.items('scrapekit')))
    if config_file.has_section(self.scraper.name):
        config.update(dict(config_file.items(self.scraper.name)))
    return config
def _loadConfig (self):
    """Read osimage-autotest.cfg: fill the engine config from [global]
    and the virt-player config from the section named by the 'virt' key."""
    config = SafeConfigParser()
    # TODO: exception for lack of .cfg
    config.read(os.path.join(INSTALL_DIR, 'osimage-autotest.cfg'))
    self.engineConfig.update(dict(config.items('global')))
    self.virtPlayerConfig.update(
        dict(config.items(self.engineConfig['virt'])))
def get_metadata_and_options():
    """Read metadata.cfg and site.cfg; return (metadata, options) dicts.

    The newline-separated 'py_modules' and 'classifiers' fields are
    turned into lists with blank entries removed.
    """
    config = SafeConfigParser()
    config.read(['metadata.cfg', 'site.cfg'])

    metadata = dict(config.items('metadata'))
    options = dict(config.items('options'))

    for key in ('py_modules', 'classifiers'):
        metadata[key] = [entry for entry in metadata[key].split('\n') if entry]

    return metadata, options
def get_metadata_and_options():
    """Read metadata.cfg and site.cfg; return (metadata, options) dicts.

    The newline-separated 'py_modules' and 'classifiers' fields are
    turned into lists with blank entries removed.
    """
    config = SafeConfigParser()
    config.read(["metadata.cfg", "site.cfg"])

    metadata = dict(config.items("metadata"))
    options = dict(config.items("options"))

    # CONSISTENCY FIX: wrap filter() in list() like the sibling
    # implementation elsewhere in this file, so the fields are real lists
    # on Python 3 too (filter() is lazy there); a no-op on Python 2.
    metadata["py_modules"] = list(filter(None, metadata["py_modules"].split("\n")))
    metadata["classifiers"] = list(filter(None, metadata["classifiers"].split("\n")))

    return metadata, options
def parse_ini(instance_type, check_all=True):
    """Parse fabfile/conf/conf.ini and populate the fabric env.

    Fills env.fabconf from [CONFIG] plus the environment-specific section,
    and env.env_config from [<ENV>_ENV_CONFIG]; also checks that the SSH
    private key exists, exiting if not.

    :param instance_type: e.g. 'messagingserver'; selects extra settings
    :param check_all: unused in this function -- TODO confirm callers
    """
    parser = SafeConfigParser()
    parser.read(os.path.abspath('fabfile/conf/conf.ini'))
    # Inject computed paths and AWS credentials before reading values out.
    parser.set('CONFIG', 'AFP_PATH', os.path.join(os.path.dirname(__file__), os.path.pardir))
    parser.set('CONFIG', 'SSH_SETTING_PATH', _base('settings/ssh'))
    parser.set('PRODUCTION_ENV_CONFIG', 'AWS_ACCESS_KEY_ID', ec2_key)
    parser.set('PRODUCTION_ENV_CONFIG', 'AWS_SECRET_ACCESS_KEY', ec2_secret)
    parser.set('STAGING_ENV_CONFIG', 'AWS_ACCESS_KEY_ID', ec2_key)
    parser.set('STAGING_ENV_CONFIG', 'AWS_SECRET_ACCESS_KEY', ec2_secret)
    fabconf = {}
    _green("Parsing conf.ini file")
    for name, value in parser.items('CONFIG'):
        # print ' %s = %s' % (name, value)
        fabconf['%s' % name.upper()] = value
    # Environment-specific values override the generic [CONFIG] section.
    for name, value in parser.items('%s' % env.environment.upper()):
        # print ' %s = %s' % (name, value)
        fabconf['%s' % name.upper()] = value
    if instance_type == 'messagingserver':
        fabconf['INSTANCE_NAME_TAG'] = "MessagingServer"
        fabconf['INSTANCE_RECIPE'] = "messagingserver"
    env_config = {}
    for name, value in parser.items('%s_ENV_CONFIG' % env.environment.upper()):
        # print ' %s = %s' % (name, value)
        env_config['%s' % name.upper()] = value
    # Flatten the env vars into a KEY="value",... string for the recipe.
    fabconf['ENV_VARS'] = ','.join('{}="{}"'.format(i, k) for i, k in env_config.items())
    env.fabconf = fabconf
    env.env_config = env_config
    env.ec2_amis = [fabconf['EC2_AMIS']]
    env.ec2_keypair = fabconf['EC2_KEYPAIR']
    env.ec2_secgroups = [fabconf['EC2_SECGROUPS']]
    env.ec2_instancetype = fabconf['EC2_INSTANCETYPE']
    print(_yellow("SSH private key verification..."))
    # Existence/readability check only; the contents are discarded.
    try:
        open(fabconf['SSH_PRIVATE_KEY_PATH']).read()
    except Exception, e:
        print(_red("SSH private key does not exist in the provided path %s !" % fabconf['SSH_PRIVATE_KEY_PATH']))
        exit()
def get_scrapycfg_targets(cfgfiles=None):
    """Build deploy targets from scrapy.cfg files.

    The [deploy] section supplies the defaults (the 'default' target);
    each [deploy:<name>] section overlays those defaults as target <name>.
    """
    cfg = SafeConfigParser()
    cfg.read(cfgfiles or [])
    base = dict(cfg.items('deploy')) if cfg.has_section('deploy') else {}
    targets = {'default': base}
    prefix = 'deploy:'
    for section in cfg.sections():
        if section.startswith(prefix):
            target = dict(base)
            target.update(cfg.items(section))
            targets[section[len(prefix):]] = target
    return targets
def Main():
    """Entry point: read config, open serial ports, run the controller."""
    parser = argparse.ArgumentParser(description='Turbidostat controller.')
    parser.add_argument("-c", "--config_filename", default="config.ini",
                        help="Where to load configuration from.")
    args = parser.parse_args()

    # Startup stacktracer for debugging deadlock
    stacktracer.trace_start("trace.html", interval=60, auto=True)

    # Read configuration from the config file
    config = SafeConfigParser()
    print 'Reading config file from', args.config_filename
    config.read(args.config_filename)
    controller_params = dict(config.items('controller'))
    port_names = dict(config.items('ports'))
    pump_params = dict(config.items('pump'))
    logs = dict(config.items('log'))

    # Open ports
    cont_port = serial.Serial(port_names['controllerport'],
                              int(controller_params['baudrate']),
                              timeout=4, writeTimeout=1)
    # Attach a reentrant lock so threads can serialize port access.
    cont_port.lock = threading.RLock()
    if (port_names['pumpport'].upper()!='NONE'):
        pump_port = serial.Serial(port_names['pumpport'],
                                  int(pump_params['baudrate']),
                                  timeout = 1, writeTimeout = 1)
        pump_port.lock = threading.RLock()
    else:
        # 'NONE' in the config means no physical pump is attached.
        pump_port = None

    # Make and start the controler
    cont = Controller(controller_params, logs, pump_params, cont_port, pump_port)
    cont.start()

    # Setup network configue port
    def cb(cmd):
        # Minimal remote protocol: 'list' dumps the controller params.
        if 'list' in cmd:
            return str(controller_params)
    netserv = CTBasicServer(('', int(port_names['network'])), cb)
    netserv.start()

    print 'num threads: ' + str(len(sys._current_frames().keys()))

    # Run until a keyboard interrupt.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print 'shutting down'
        cont.quit()
        time.sleep(1.1)
def from_file(cls, filename):
    """Build a TemplateManifest from an ini manifest file."""
    parser = SafeConfigParser()
    parser.read(filename)
    meta = dict(parser.items('metadata'))
    # Only the option values matter for required_vars; the keys are
    # ignored.
    required = [value for _name, value in parser.items('required_vars')]
    return TemplateManifest(metadata=meta, required_vars=required)
def loadConfiguration(path): print 'Load configuration files settings' configAll = SafeConfigParser() configAll.read(path) profile = dict(configAll.items('profile')) owncloud = dict(configAll.items('owncloud')) stacksync = dict(configAll.items('stacksync')) log_server = dict(configAll.items('log_server')) graphite = dict(configAll.items('graphite')) return {'profile': profile, 'ss': {'owncloud': owncloud, 'stacksync': stacksync}, 'ls': log_server, 'graphite': graphite}
def getConf(cfgType):
    """Read the auth and backup settings for cfgType.

    Credentials come from conf/auth/<cfgType>.auth ([auth] section);
    backup settings from conf/<cfgType>.cfg ([backup] section).
    """
    conf = {'auth': {}, 'backup': {}}
    config = SafeConfigParser()
    config.read('conf/auth/' + cfgType + '.auth')
    for key, value in config.items('auth'):
        conf['auth'][key] = value
    # Same parser instance: the .cfg file is merged on top of the .auth.
    config.read('conf/' + cfgType + '.cfg')
    for key, value in config.items('backup'):
        conf['backup'][key] = value
    return conf
def get_host_modules(app, host_name=None, appman_file=None, config_dir=CONFIG_DIR):
    '''
    Get a list of all hosts in the appman.conf file, and a list of
    modules to start on each host. If a specific host is queried, return
    a list just for that host
    '''
    hosts = {}
    if appman_file is None:
        appman_file = 'appman.conf'
    conf_file = os.path.join(config_dir, app, appman_file)
    if not os.path.isfile(conf_file):
        raise ValueError('%s not found' % (conf_file))
    parser = SafeConfigParser()
    parser.read(conf_file)
    prefix = 'module '
    for section in parser.sections():
        # Only "[module <name>]" sections describe startable modules.
        if not section.startswith(prefix):
            continue
        module = section[len(prefix):]
        config = dict(parser.items(section))
        # Modules without an explicit host run on the default host.
        host = config.get('host', DEFAULT_HOST)
        if host_name is not None and host != host_name:
            continue
        hosts.setdefault(host, []).append(module)
    return hosts
def read_bitcoin_config(dbdir):
    """Read the wavi.conf file from dbdir, returns dictionary of settings"""
    from ConfigParser import SafeConfigParser

    class FakeSecHead(object):
        """Wrap a file-like object, injecting a fake [all] section header
        so ConfigParser can parse the sectionless bitcoin-style conf."""
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'

        def readline(self):
            # First call returns the fake header; later calls pass lines
            # through with trailing '#' comments stripped.
            if self.sechead:
                try:
                    return self.sechead
                finally:
                    self.sechead = None
            else:
                s = self.fp.readline()
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() + "\n"
                return s

    config_parser = SafeConfigParser()
    # FIX: close the conf file after parsing instead of leaking the
    # handle until garbage collection.
    conf = open(os.path.join(dbdir, "wavi.conf"))
    try:
        config_parser.readfp(FakeSecHead(conf))
    finally:
        conf.close()
    return dict(config_parser.items("all"))
class ConfigWrapper(object):
    """Attribute-style wrapper around SafeConfigParser.

    `wrapper.section` (or `wrapper['section']`) yields a SectionWrapper;
    every option write is flushed to disk immediately.
    """
    def __init__(self, path):
        # path is kept so every option write can rewrite the file.
        self.__path = path
        self.__parser = SafeConfigParser(DEFAULTS)
        self.__parser.read(path)

    def __getattr__(self, section):
        # Any attribute not found normally is treated as a section name.
        return self[section]

    def __getitem__(self, section):
        return SectionWrapper(config=self, section=section)

    def __hasattr__(self, section):
        # NOTE(review): __hasattr__ is not a real special method --
        # Python never calls it implicitly, so hasattr() does NOT use
        # this; it is effectively dead code.
        return section in self

    def __contains__(self, section):
        return self.__parser.has_section(section)

    def add_section(self, section):
        self.__parser.add_section(section)

    def _get_option(self, section, option):
        return self.__parser.get(section, option)

    def _set_option(self, section, option, value):
        # Persist immediately: every set rewrites the whole file.
        self.__parser.set(section, option, value)
        with open(self.__path, 'wb') as configfile:
            self.__parser.write(configfile)

    def _has_option(self, section, option):
        return self.__parser.has_option(section, option)

    def _iter(self, section):
        return iter(self.__parser.items(section))
#!/usr/bin/python #-*- coding: UTF-8 -*- from ConfigParser import SafeConfigParser parser = SafeConfigParser() parser.read('multisection.ini') for section_name in parser.sections(): print 'Section:', section_name print 'Options:', parser.options(section_name) for name, value in parser.items(section_name): print ' {0} = {1}'.format(name, value) print
def main(): if len(sys.argv) < 4: print "Usage: sourceDir resultDir configFile" sys.exit(1) sourceDir = sys.argv[1] resultDir = sys.argv[2] configFile = sys.argv[3] inetnumFilePath = os.path.join(sourceDir, "inetnum") if not os.path.exists(inetnumFilePath): print "{0} doesn't exist".format(inetnumFilePath) sys.exit(1) orgFilePath = os.path.join(resultDir, "org_lacnic") mapFilePath = os.path.join(resultDir, "inetnum_org_lacnic") orgFd = open(orgFilePath, "w") mapFd = open(mapFilePath, "w") onlyStartRe = re.compile("^{\n?$", re.I) endStartRe = re.compile("^}{\n?$", re.I) onlyEndRe = re.compile("^}\n?$", re.I) configParser = SafeConfigParser() configParser.read(configFile) orgConfig = configParser.items("org") keys = [] for key, option in orgConfig: keys.append(key) orgFd.write("\t".join(keys) + "\n") netOrgMap = {} orgKeys = {} currObj = None currJsonList = [] orgNum = 0 objNum = 0 lineNum = 0 with open(inetnumFilePath, "r") as fd: for line in fd: lineNum += 1 if currObj: #current in a json object isEnd = False isStart = False if endStartRe.match( line): #match }{, got a complete json object isEnd = True isStart = True elif onlyEndRe.match(line): isEnd = True else: currJsonList.append(line) continue if isEnd: currJsonList.append("}") objNum += 1 if objNum % 10000 == 0: print "process {0} objects and {1} of them have org".format( objNum, orgNum) response = getOrgInfo(currJsonList) if response[0] == 1: orgNum += 1 resultDict = response[1] netKey = response[2] orgKey = response[3] if not orgKeys.has_key(orgKey): resultDict['type'] = "organisation" tempResp = crossMatch(resultDict, orgConfig) if tempResp[0] == 1: resultList = tempResp[1] orgFd.write("\t".join(resultList) + "\n") orgKeys[orgKey] = 1 netOrgMap[response[2]] = response[3] else: print "{0}".format(currJsonList) currObj = None currJsonList = [] if isStart: currJsonList = ["{"] currObj = 1 continue else: if onlyStartRe.match(line): currObj = 1 currJsonList.append("{") else: print "error process line at {0}: 
{1}".format( lineNum, line) sys.exit(1) for netKey in netOrgMap: mapFd.write("\t".join([netKey, netOrgMap[netKey]]) + "\n") print "finish processing {0} objects and {1} of them have org".format( objNum, orgNum) orgFd.close() mapFd.close()
def generate_components_config():
    """Map cluster_settings strings to a dict of typed component flags.

    Relies on the module-level cluster_settings dict populated in the
    __main__ block below.
    """
    settings = {}
    settings["sahara"] = s2b(cluster_settings.get('sahara', 'false'))
    settings["murano"] = s2b(cluster_settings.get('murano', 'false'))
    settings["ceilometer"] = s2b(cluster_settings.get('ceilometer', 'false'))
    settings["volumes_lvm"] = s2b(cluster_settings.get('volumes_lvm', 'false'))
    settings["volumes_ceph"] = s2b(
        cluster_settings.get('volumes_ceph', 'false'))
    settings["images_ceph"] = s2b(cluster_settings.get('images_ceph', 'false'))
    settings["ephemeral_ceph"] = s2b(
        cluster_settings.get('ephemeral_ceph', 'false'))
    # osd_pool_size stays a plain value (default 1), not a boolean.
    settings["osd_pool_size"] = cluster_settings.get('osd_pool_size', 1)
    return settings


def s2b(v):
    """String-to-bool: truthy spellings are yes/true/t/1 (any case)."""
    return v.lower() in ("yes", "true", "t", "1")


if __name__ == '__main__':
    # argv[1]: cluster ini file; argv[2]: fuel master IP.
    parser = SafeConfigParser()
    parser.read(argv[1])
    cluster_settings = dict(parser.items('cluster'))
    fuel_ip = argv[2]
    kvm_count = int(cluster_settings.get('node_count'))
    delete_environment()
    create_environment()
    deploy_environment()
    await_deploy()
parser.read('health_monitor.ini') # Read path to log file LOG_FILENAME = parser.get('config', 'log_filename') # monitoring period in seconds MONITORING_PERIOD = parser.getint('config', 'monitoring_period') # minimum logging period in seconds MIN_LOG_PERIOD = parser.getint('config', 'min_log_period') # remote logging URL REMOTELOG_URL = parser.get('config', 'remotelog_url') # List of monitored devices names and IP addresses MONITORED_DEVICES = parser.items('monitored_devices') ################# # LOGGING SETUP ################# LOG_LEVEL = logging.INFO # Could be e.g. "DEBUG" or "WARNING" # Configure logging to log to a file, making a new file at midnight and keeping the last 3 day's data # Give the logger a unique name (good practice) logger = logging.getLogger(__name__) # Set the log level to LOG_LEVEL logger.setLevel(LOG_LEVEL) # Make a handler that writes to a file, making a new file at midnight and keeping 3 backups handler = logging.handlers.TimedRotatingFileHandler(LOG_FILENAME, when="midnight", backupCount=3)
class EasyConfig(object):
    """
    This class is a wrapper for python's SafeConfigParser. The purpose of
    this class is to allow for more convenient use of the ConfigParser,
    along with adding built-in support for list types. Additionally, the
    class handles the type conversion implicitly inside it, so config
    options are retrieved with their appropriate type.

    Accessing configuration options with the class is performed as if the
    options were class members, so, for example, if the configuration
    file has the following section and item:

    [SomeSection]
    some_item = some_value

    Then, accessing the item will look like that:

    conf = EasyConfig("path_to_config.ini")
    print conf.SomeSection.some_item # Will print "some_value"
    conf.SomeSection.some_item = 3.14
    print conf.SomeSection.some_item # Will print 3.14
    conf.SomeSection.some_item += 0.01
    print conf.SomeSection.some_item # Will print 3.15

    A list config item is written as if it was a python's legit list
    declaration. For example:

    [SomeSection]
    some_list = [1, 2, 3]
    """

    def __init__(self, config):
        """
        The class is initialized with a path to a configuration file
        passed via config, or a file-like object.
        """
        self.parser = SafeConfigParser()
        if isinstance(config, basestring):
            self.config_path = config
            config = open(self.config_path, "r")
        else:
            self.config_path = None
        self.parser.readfp(config)
        # Mirror every parsed section as a Section attribute on self.
        for section in self.parser.sections():
            options = self.parser.items(section)
            options = map(lambda op: Option(*op), options)
            self.add_section(Section(section, options))

    def __iter__(self):
        """
        Iterate over all the section in the configuration.
        """
        # Sections live directly in the instance dict; skip non-Section
        # attributes (parser, config_path, ...).
        for section in self.__dict__.values():
            if isinstance(section, Section):
                yield section

    def __iadd__(self, section):
        """
        If section is of type Section, adds the section to the config (if
        it's not already present). Otherwise, the operator does nothing.
        """
        self.add_section(section)
        return self

    def add_section(self, section):
        """
        Adds a new section to the config.

        If section is str, it's converted to Section object. If section
        is Section, it's left untouched. If a section with the same name
        does not already exist in the config, it's added, and the
        function returns True. If such section already exists, the call
        is ignored, and the function returns False.
        """
        if isinstance(section, Section):
            section_name = section.name
        elif isinstance(section, str):
            section_name = section
            section = Section(section)
        else:
            raise TypeError("section should be either str or Section")
        if not self.__dict__.has_key(section_name):
            setattr(self, section_name, section)
            return True
        else:
            return False

    def save(self, config = None):
        """
        Will pass the configuration to the ConfigParser module, and save
        the configuration.

        If config is none, the configuration will be saved in the path
        with which the object was initialized. If config is a path to a
        file, the configuration will be saved in that file. If config is
        a file-like object, it will be written to it.
        """
        if not config:
            if not self.config_path:
                raise RuntimeError(
                    "No output defined (either path or file-like object.")
            config = open(self.config_path, "w")
        elif isinstance(config, basestring):
            config = open(config, "w")
        # Sync the internal parser with our state before writing.
        self.update_parser()
        self.parser.write(config)

    def update_parser(self):
        """
        Updates the state of the internal ConfigParser.
        """
        for section in self:
            if not self.parser.has_section(str(section)):
                self.parser.add_section(str(section))
            for option in section:
                self.parser.set(str(section), str(option), option.value_str)

    def upgrade(self, other_easy_config):
        """
        Adds all the values from the other config to the current config.

        If the other config contains options that are already present in
        the current config, the new options are ignored.
        """
        for other_section in other_easy_config:
            # False means that such section name already exists.
            if not self.add_section(other_section):
                for other_option in other_section:
                    section = getattr(self, other_section.name)
                    section += other_option
def __init__(self, params, status, calib_data, service_name):
    """
    Set up an uploader for one weather service: load the service's
    bundled ``services/<name>.ini`` description, choose the transport
    (HTTP GET/POST, APRS socket, or MQTT), and prepare the template,
    credentials and upload schedule.

    :param params: pywws configuration.
    :type params: :class:`pywws.DataStore.params`
    :param status: pywws status store.
    :type status: :class:`pywws.DataStore.status`
    :param calib_data: 'calibrated' data.
    :type calib_data: :class:`pywws.DataStore.calib_store`
    :param service_name: name of service to upload to.
    :type service_name: string
    """
    self.logger = logging.getLogger('pywws.ToService(%s)' % service_name)
    self.params = params
    self.status = status
    self.data = calib_data
    self.service_name = service_name
    # 'derived' services such as 'underground_rf' share their
    # parent's config and templates
    config_section = self.service_name.split('_')[0]
    if config_section == self.service_name:
        self.parent = None
    else:
        self.parent = config_section
    self.old_response = None
    self.old_ex = None
    self.http_headers = None
    # set default socket timeout, so urlopen calls don't hang forever
    # NOTE(review): eval() of a config value executes arbitrary code from
    # the user's config file -- trusted input is assumed throughout.
    if eval(self.params.get('config', 'asynchronous', 'False')):
        socket.setdefaulttimeout(60)
    else:
        socket.setdefaulttimeout(20)
    # open params file (the .ini bundled with pywws for this service);
    # optionxform = str keeps option names case-sensitive
    service_params = SafeConfigParser()
    service_params.optionxform = str
    param_string = pkg_resources.resource_string(
        'pywws', 'services/%s.ini' % (self.service_name))
    if sys.version_info[0] >= 3:
        param_string = param_string.decode('utf-8')
    service_params.readfp(StringIO.StringIO(param_string))
    # get URL and pick the transport from the URL scheme
    self.server = service_params.get('config', 'url')
    parsed_url = urlparse.urlsplit(self.server)
    if parsed_url.scheme == 'aprs':
        # APRS uses a raw (host, port) socket instead of a URL
        self.send_data = self.aprs_send_data
        server, port = parsed_url.netloc.split(':')
        self.server = (server, int(port))
    elif parsed_url.scheme == 'mqtt':
        self.send_data = self.mqtt_send_data
    else:
        self.send_data = self.http_send_data
        self.use_get = eval(service_params.get('config', 'use get'))
    # get fixed part of upload data; a value starting with '*' is an
    # indirection into the user's own config section
    self.fixed_data = dict()
    for name, value in service_params.items('fixed'):
        if value[0] == '*':
            value = self.params.get(config_section, value[1:], 'unknown')
        self.fixed_data[name] = value
    # create templater
    self.templater = Template.Template(
        self.params, self.status, self.data, self.data, None, None,
        use_locale=False)
    template_name = self.params.get(config_section, 'template', 'default')
    if template_name != 'default':
        # user-supplied template from the templates directory
        template_dir = self.params.get(
            'paths', 'templates', os.path.expanduser('~/weather/templates/'))
        self.template_file = open(
            os.path.join(template_dir, template_name), 'rb')
    else:
        # fall back to the template bundled for this station type,
        # then to the generic 1080 template
        template_name = 'services/%s_template_%s.txt' % (
            config_section, self.params.get('config', 'ws type'))
        if not pkg_resources.resource_exists('pywws', template_name):
            template_name = 'services/%s_template_1080.txt' % (
                config_section)
        self.template_file = pkg_resources.resource_stream(
            'pywws', template_name)
    # get other parameters
    self.auth_type = service_params.get('config', 'auth_type')
    if self.auth_type == 'basic':
        user = self.params.get(config_section, 'user', 'unknown')
        password = self.params.get(config_section, 'password', 'unknown')
        details = user + ':' + password
        self.auth = 'Basic ' + base64.b64encode(
            details.encode('utf-8')).decode('utf-8')
    elif self.auth_type == 'mqtt':
        self.user = self.params.get(config_section, 'user', 'unknown')
        self.password = self.params.get(config_section, 'password', 'unknown')
    # eval() converts the ini strings to Python values (int/bool/dict);
    # interval is clamped to at least 40 seconds between uploads
    self.catchup = eval(service_params.get('config', 'catchup'))
    self.expected_result = eval(service_params.get('config', 'result'))
    self.interval = eval(service_params.get('config', 'interval'))
    self.interval = max(self.interval, 40)
    self.interval = timedelta(seconds=self.interval)
    if service_params.has_option('config', 'http_headers'):
        self.http_headers = eval(
            service_params.get('config', 'http_headers'))
    # move 'last update' from params to status (one-time migration of
    # state kept in the wrong store by older pywws versions)
    last_update = self.params.get_datetime(self.service_name, 'last update')
    if last_update:
        self.params.unset(self.service_name, 'last update')
        self.status.set('last update', self.service_name,
                        last_update.isoformat(' '))
    # set timestamp of first data to upload: go back far enough to
    # cover either the catch-up window or one upload interval
    self.next_update = datetime.utcnow() - max(
        timedelta(days=self.catchup), self.interval)
class Model: def __init__(self): # gui flag self.gui = False # cred config section self.taxicompany = False self.rentcompany = False self.countercars = 0 self.allcars = 0 self.currentfirstcar = 0 self.phonenumbersgiven = False self.phonenumbers = [] # flag config section self.loginwifi = False self.creategoogleaccount = False self.installapps = False self.pairdriverapp = False self.configuresettings = False # wifi config section self.wifissid = "" self.wifipw = "" # create google account config section self.birthday = "00" self.birthmonth = "00" self.birthyear = "0000" self.customerfirstname = "" self.customerlastname = "" # # install apps config section self.installappsps = [] self.installappslf = [] self.startapps = [] self.simpins = [] # pair driver app config section self.drivername = "" self.driverpw = "" # configure settings config section self.configurepowersavingmode = False self.configurehomescreen = False self.configurelockscreenapp = False self.configurelocationsettings = False self.configuresoundsettings = False self.disablesimlock = False self.configurescreenbrightness = False self.disabledevoptions = False self.rebootdevicewhenfinished = False # configure device lock self.enablelockdevice = False self.lockpattern = -1 # which mode should start ? 
(-c = Config Mode) if len(sys.argv) > 0 and sys.argv[1] == '-c': debug_print("Read Config") self.parser = SafeConfigParser() self.mode = ScriptMode.configmode self.interactivemode = None self.parser.read('../config.cfg') for section_name in self.parser.sections(): print 'Section:', section_name if section_name == "cred": self._iterateoverlistandaddproperties(self.parser.items(section_name)) if section_name == "flags": self._iterateoverlistandaddproperties(self.parser.items(section_name), True) print ' Options:', self.parser.options(section_name) for name, value in self.parser.items(section_name): print ' %s = %s' % (name, value) if len(sys.argv) > 2: if sys.argv[2] == '-g': debug_print("GUI will be displayed") self.gui = True else: debug_print("Interactive Mode") self.mode = ScriptMode.interactivemode self.interactivemode = InteractiveMode() self.parser = None def __str__(self): return str(self.__class__) + ": " + str(self.__dict__) def _iterateoverlistandaddproperties(self, list, flagcred=None): if flagcred is None: for name, value in list: for ownattribute in [a for a in dir(self) if not a.startswith('__') and not callable(getattr(self, a))]: if name == ownattribute: setattr(self, name, value) print "Added to own model: " + name + " and value: " + value else: for name, value in list: for ownattribute in [a for a in dir(self) if not a.startswith('__') and not callable(getattr(self, a))]: if name == ownattribute: setattr(self, name, value) print "Added to own model: " + name + " and value: " + value if bool(value): self._iterateoverlistandaddproperties(self.parser.items(name)) def get_current_birthday(self): if int(self.birthday) == 28: self.birthday = 1 return self.birthday self.birthday += 1 return self.birthday def get_current_birthmonth(self): if int(self.birthmonth) == 12: self.birthmonth = 1 return self.birthmonth self.birthmonth += 1 return self.birthmonth def get_current_birthyear(self): if self.birthyear == 1995: self.birthyear = 1951 return self.birthyear 
self.birthyear += 1 return self.birthyear
def main_thread(): parser = create_parser() opt = parser.parse_args() conf = SafeConfigParser() if opt.conf: conf.readfp(opt.conf) elif opt.reset: conf.readfp(opt.reset) elif opt.list: conf.readfp(opt.list) else: parser.print_help() return workspace_ = conf.get('common', 'workspace') try: os.makedirs(workspace_) except OSError: pass if opt.reset: Filter(workspace_).reset() return if opt.list: keys = Filter(workspace_).list_doing_task() for k in keys: print k return output_service_conf = dict(conf.items('source')) input_service_conf = dict(conf.items('destination')) if conf.has_option('common', 'threads'): _threads = conf.getint('common', 'threads') else: _threads = 10 if conf.has_option('common', 'record_succ'): _record_succ = conf.getboolean('common', 'record_succ') else: _record_succ = True print _record_succ log_config['handlers']['error_file']['filename'] = path.join(workspace_, 'failed_files.txt') dictConfig(log_config) loads_services() output_service = services_[output_service_conf['type']](**output_service_conf) input_service = services_[input_service_conf['type']](**input_service_conf) migrator = ThreadMigrator(input_service=input_service, output_service=output_service, work_dir=conf.get('common', 'workspace'), threads=_threads, record_succ=_record_succ) migrator.start() import time try: while True: state = migrator.status() if state['finish']: break time.sleep(3) except KeyboardInterrupt: state = migrator.status() print state #import sys #sys.exit() migrator.stop() state = migrator.status() print 'summary:\n ', 'failed: ', state['fail'], ' success: ', state['success']
def get_search_names(path="/etc/geode/settings.conf", raw=False):
    """Return the option names found in the "Searches" config section.

    :param path: location of the geode settings file.
    :param raw: forwarded to ``items()``; controls value interpolation
        only, the returned names are unaffected.
    """
    cfg = SCP()
    cfg.read(path)
    names = []
    for name, _value in cfg.items("Searches", raw=raw):
        names.append(name)
    return names
class ClusterDiagram(MultiprocessingBase):
    """
    Builds a linear diagram of annotated gene clusters extracted from a set
    of genomes, optionally drawing BLAST-based crosslinks between adjacent
    clusters, and writes it out as PDF/EPS/SVG.

    Workflow: load() a project file, extract_clusters(), draw_clusters().
    Worker methods run through the MultiprocessingBase machinery.
    """

    def __init__(self, abort_event):
        super(ClusterDiagram, self).__init__(abort_event)
        self.name = 'ClusterDiagram'
        self.project = None   # SafeConfigParser for the project file
        self.files = []       # genome files resolved from the [files] section
        #general options
        self.tags = []        # cluster tags to extract
        self.order = []       # optional explicit genome ordering
        self.marks = set()    # record ids whose track label is highlighted
        self.colors = {}      # gene name -> reportlab color
        # display options (also in general section)
        self.pagesize = 'A4'
        self.default_color = colors.grey
        self.no_border = False
        self.no_color = False
        #crosslinks options
        self.add_crosslinks = True
        self.min_identity = 0
        self.min_length = 0
        self.evalue = 10
        #label options
        self.name_size = 9
        self.gene_size = 10
        self.unknown_gene_size = 7
        self.gene_angle = 15
        #results
        self.clusters = []
        self.fsets = []       # one GenomeDiagram feature set per cluster track
        self.crosslinks = []
        self.diagram = None

    def _get_option(self, section, option, default, conv=None):
        # Read a project option, applying the optional converter ``conv``;
        # fall back to ``default`` on a missing key or failed conversion.
        try:
            val = self.project.get(section, option)
            if conv is not None:
                val = conv(val)
        except (ConfigError, ValueError):
            val = default
        return val

    def load(self, project_file, overrides=None):
        """
        Parse a project .ini file into the instance attributes.
        Returns True on success, False (with a printed message) otherwise.
        """
        #TODO: implement overrides
        self.project = SafeConfigParser()
        try:
            self.project.read(project_file)
        except ConfigError as e:
            print 'Unable to load %s:\n%s' % (project_file, str(e))
            return False
        # parse general section
        if not self.project.has_section('general'):
            print 'Malformed project file.\nNo "general" section in %s' % project_file
            return False
        self.tags = shlex.split(self._get_option('general', 'tags', ''))
        if not self.tags:
            print 'No "tags" were value was found in "general" section in %s' % project_file
            return False
        self.name = project_file
        self.order = shlex.split(self._get_option('general', 'order', ''))
        self.marks = set(shlex.split(self._get_option('general', 'marks', '')))
        self.pagesize = self._get_option('general', 'page_size', 'A4')
        # mere presence of the option enables these flags
        self.no_border = self._get_option('general', 'no_border', None) is not None
        self.no_color = self._get_option('general', 'no_color', None) is not None
        self.default_color = self._get_option('general', 'default_color', '')
        if self.default_color:
            self.default_color = colors.HexColor(self.default_color)
        else:
            self.default_color = colors.grey
        # files section: glob patterns relative to an optional base dir
        self.files = []
        if self.project.has_section('files'):
            dir = self._get_option('files', 'dir', '')
            files = shlex.split(self._get_option('files', 'files', ''))
            if files:
                self.files = list(chain.from_iterable(
                    glob(os.path.join(dir, f)) for f in files))
        # crosslinks options: presence of the section enables crosslinking
        self.add_crosslinks = self.project.has_section('crosslinks')
        if self.add_crosslinks:
            self.min_identity = self._get_option('crosslinks', 'min_identity', 0, float)
            self.min_length = self._get_option('crosslinks', 'min_length', 0, float)
            self.evalue = self._get_option('crosslinks', 'evalue', 10, float)
        # colors if have/needed
        if not self.no_color:
            if self.project.has_section('colors'):
                self.colors = dict((gene, colors.HexColor(col))
                                   for gene, col in self.project.items('colors'))
            #else, colors will be generated automatically after the clusters are extracted
        # label options (sizes clamped to a readable minimum of 6pt)
        self.name_size = max(6, self._get_option('labels', 'name_size', 10, int))
        self.gene_size = max(6, self._get_option('labels', 'gene_size', 10, int))
        self.unknown_gene_size = max(6, self.gene_size - 2)
        self.gene_angle = self._get_option('labels', 'gene_angle', 15, float)
        return True

    @staticmethod
    def _feature_name(f, quals=('gene', 'locus_tag'), default='unknown'):
        # First non-empty qualifier from ``quals`` wins; values are joined
        # with spaces. Returns ``default`` when none is present.
        for q in quals:
            val = f.qualifiers.get(q)
            if not val:
                continue
            return ' '.join(val)
        return default

    @MultiprocessingBase.data_mapper_method
    @shelf_result
    def _process_genome(self, gi, recs):
        # Worker: extract every tagged cluster feature from one genome
        # record and wrap it as a _DiagramCluster.
        record = recs[gi]
        clusters = []
        for tag in self.tags:
            tag_clusters = []
            for f in record.features:
                # cluster annotation
                q = f.qualifiers.get(ClusterFinder.tag_qualifier)
                if not q:
                    continue
                if tag != ' '.join(q):
                    continue
                cluster = f.extract(record)
                copy_attrs(record, cluster)
                # disambiguate multiple clusters with the same tag
                if tag_clusters:
                    cluster.description += ' cluster %d' % (len(tag_clusters)+1)
                cluster = _DiagramCluster(cluster, f.location)
                cluster.set_genes(f.qualifiers.get(ClusterFinder.genes_qualifier, []))
                cluster.marked = record.id in self.marks
                tag_clusters.append(cluster)
            if tag_clusters:
                clusters += tag_clusters
        return clusters

    def _generate_gene_colors(self):
        # Assign each distinct gene name a color along a red->green->blue
        # gradient (two interpolation segments split at the middle gene).
        if not self.clusters:
            return
        full_gene_set = set()
        for c in self.clusters:
            full_gene_set.update(c.genes)
        if 'NONE' in full_gene_set:
            full_gene_set.remove('NONE')
        ngenes = float(len(full_gene_set))-1
        middle = ngenes/2.0
        self.colors = {}
        for i, gene in enumerate(sorted(full_gene_set)):
            t = i/ngenes
            if i < middle:
                c = colors.linearlyInterpolatedColor(colors.Color(1, 0, 0, 1),
                                                     colors.Color(0, 1, 0, 1),
                                                     0, 1, t*2)
            else:
                c = colors.linearlyInterpolatedColor(colors.Color(0, 0.9, 0.1, 1),
                                                     colors.Color(0, 0, 1, 1),
                                                     0, 1, t*2-1)
            self.colors[gene] = c

    def extract_clusters(self):
        """
        Load the genomes and extract clusters in parallel (respecting the
        configured genome order). Returns True if any cluster was found.
        """
        self.clusters = []
        genomes = SeqView.safe_load(self.files)
        if not genomes:
            return False
        glen = len(genomes)
        self.clusters = [None]*glen
        if self.order:
            # drop ordering ids that are not among the loaded genomes
            self.order = [oid for oid in self.order if oid in genomes.keys()]
        with ProgressCounter('Extracting clusters from provided genomes:', glen) as prg:
            work = self.Work()
            work.start_work(self._process_genome, self.order or genomes.keys(),
                            None, genomes)
            work.assemble(ordered_shelved_results_assembler, self.clusters, prg)
            if not work.wait():
                return False
        # flatten per-genome result lists, skipping empty ones
        self.clusters = list(chain.from_iterable(c for c in self.clusters if c))
        #generate gene colors if needed
        if not self.no_color and not self.colors:
            self._generate_gene_colors()
        return bool(self.clusters)

    @MultiprocessingBase.data_mapper_method
    def _blast_feature(self, f, c1, c2):
        # Worker: blastp one CDS of cluster c1 against all six reading
        # frames of cluster c2; returns (feature1, feature2, color) triples
        # for every HSP passing the identity/length thresholds.
        trans = Translator(self._abort_event)
        cds = trans.translate(f.extract(c1), 11)
        sixframes = trans.translate_six_frames_single(c2, 11)
        if not sixframes:
            return [(None, None, None)]
        results = []
        for frame in sixframes:
            res = BlastCLI.s2s_blast(cds, frame, self.evalue,
                                     command='blastp', task='blastp')
            if res:
                results.extend(res)
        hsps = BlastCLI.all_hsps(results)
        if not hsps:
            return [(None, None, None)]
        f1 = []
        f2 = []
        col = []
        fname = self._feature_name(f, default='CDS')
        cds_len = len(cds)
        min_len = len(cds) * self.min_length
        for hsp in hsps:
            if hsp.align_length < min_len:
                continue
            if hsp.identities / float(hsp.align_length) < self.min_identity:
                continue
            # crosslink color encodes percent identity of the hit
            color_t = (float(hsp.identities) / hsp.align_length)
            print '%s %s: %5.1f%% (%5.1f%%)' % (c1.description, fname, color_t * 100,
                                                float(hsp.identities) / cds_len * 100)
            col.append(colors.linearlyInterpolatedColor(colors.Color(0, 0, 1, 0.2),
                                                        colors.Color(0, 1, 0, 0.2),
                                                        0.2, 1, color_t))
            # protein HSP coordinates back to nucleotide offsets (1-based
            # blast positions, 3 nucleotides per residue)
            qstart = (hsp.query_start - 1) * 3
            qend = qstart + hsp.align_length * 3
            sstart = (hsp.sbjct_start - 1) * 3
            send = sstart + hsp.align_length * 3
            f1.append(SeqFeature(FeatureLocation(f.location.start + qstart,
                                                 f.location.start + qend,
                                                 strand=hsp.strand[0])))
            f2.append(SeqFeature(FeatureLocation(sstart, send, strand=hsp.strand[1])))
        return zip(f1, f2, col)

    @MultiprocessingBase.results_assembler_method
    def _compose_crosslink(self, index, result, features1, features2):
        # Assembler: add each HSP pair as diagram features and register the
        # CrossLink between them.
        for f1, f2, col in result:
            if f1 is None:
                continue
            if self.no_color:
                col = colors.color2bw(col)
            tf1 = features1.add_feature(f1, color=col, border=col)
            tf2 = features2.add_feature(f2, color=col, border=col)
            self.diagram.cross_track_links.append(CrossLink(tf1, tf2, col, col))

    def _compose_crosslinks(self):
        # Blast each cluster against the next one (adjacent tracks only)
        # and add the resulting crosslink features to the diagram.
        if not self.diagram or not self.fsets:
            return
        print 'Finding crosslinks between genes in the cluster.'
        num_cids = len(self.clusters)
        for ci1, c1 in enumerate(self.clusters):
            if ci1 >= num_cids - 1:
                break
            features1 = self.fsets[ci1]
            ci2 = ci1 + 1
            features2 = self.fsets[ci2]
            c2 = self.clusters[ci2]
            print '%s vs %s' % (c1.sequence.description, c2.sequence.description)
            work = self.Work()
            work.start_work(self._blast_feature, c1.CDS, None,
                            c1.sequence, c2.sequence)
            work.assemble(self._compose_crosslink, features1, features2)
            work.wait()

    def draw_clusters(self):
        """
        Render the extracted clusters (one track each, plus optional
        crosslinks) and write the diagram as PDF, EPS and SVG.
        """
        print 'Creating cluster diagram.'
        # create diagram
        self.diagram = GenomeDiagram.Diagram(self.name)
        # add tracks; marked clusters get a highlighted label color
        max_len = max(len(c) for c in self.clusters)
        normal_color = colors.grey if self.no_color else colors.black
        mark_color = colors.black if self.no_color else colors.red
        for cluster in self.clusters:
            col = mark_color if cluster.marked else normal_color
            track = self.diagram.new_track(1,
                                           name=cluster.label,
                                           greytrack=1, height=0.4,
                                           greytrack_fontcolor=col,
                                           greytrack_labels=1,
                                           greytrack_fontsize=self.name_size,
                                           scale=False,
                                           start=0, end=max_len)
            self.fsets.append(track.new_set())
        # add crosslink features (must happen before CDS features so the
        # arrows are drawn on top)
        if self.add_crosslinks:
            self._compose_crosslinks()
        # add CDS-es; genes recognized by the cluster annotation get their
        # assigned color and the larger label size
        for ci, cluster in enumerate(self.clusters):
            gene_id = 0
            for f in cluster.CDS:
                known = False
                fname = 'NONE'
                if gene_id < len(cluster.genes):
                    fname = cluster.genes[gene_id]
                fcolor = self.colors.get(fname, self.default_color)
                if fname == 'NONE':
                    fname = self._feature_name(f, default='')
                    fcolor = self.default_color
                else:
                    #decapitalize gene names if they are marked as proteins
                    # fname = fname[:1].lower() + fname[1:] if fname else ''
                    known = True
                if self.no_color:
                    fcolor = colors.color2bw(fcolor)
                self.fsets[ci].add_feature(f, sigil="BIGARROW",
                                           color=fcolor,
                                           border=fcolor if self.no_border else colors.black,
                                           name=fname, label=True,
                                           label_position="middle",
                                           label_size=self.gene_size if known else self.unknown_gene_size,
                                           label_color=colors.black if known else colors.grey,
                                           label_angle=self.gene_angle)
                gene_id += 1
        self.diagram.draw(format="linear", pagesize=self.pagesize,
                          fragments=1, start=0, end=max_len)
        for ptype in ('PDF', 'EPS', 'SVG'):
            dianame = '%s.%s' % (self.name, ptype.lower())
            print 'Saving: %s' % dianame
            self.diagram.write(dianame, ptype)
        print 'Done.'
class ConfigSection(object):
    """
    Wraps SafeConfigParser with static section handling

    :param defaults: dict-like containing default keys/values
    :param section: name of section to initially bind to
    :note: Not an exact interface reproduction, some functionality left out!
    """

    def __init__(self, defaults, section):
        self._section = section
        # SafeConfigParser is old-style, and we're changing method parameters
        self._scp = SafeConfigParser(defaults)
        self._scp.add_section(self._section)

    def defaults(self):
        """
        Returns dictionary of default options
        """
        return self._scp.defaults()

    def sections(self):
        """
        Returns a list containing this instances section-name
        """
        return [self._section]

    def add_section(self, section):
        """
        Not written, do not use!

        :raises NotImplementedError: DO NOT USE!
        """
        raise NotImplementedError()

    def has_section(self, section):
        """
        Returns True if instance-section == ``section``

        :param section: Name of section to check.
        :returns: True/False if section exists.
        """
        return section == self._section

    def options(self):
        """
        Returns a list of the option (key) names in this section
        """
        # DOCFIX: SafeConfigParser.options() returns a list of names,
        # not a dictionary as the original docstring claimed.
        return self._scp.options(self._section)

    def has_option(self, option):
        """
        Returns True if key-named ``option`` exists

        :param option: Name of the option (key) to check.
        :returns: True/False if option (key) exists.
        """
        return self._scp.has_option(self._section, option)

    # Private method doesn't need docstring
    def _prune_sections(self):  # pylint: disable=C0111
        # Drop every section a read() may have pulled in except our own,
        # preserving the single-section invariant.
        for section in self._scp.sections():
            if section != self._section:
                self._scp.remove_section(section)

    def read(self, filenames):
        """
        Replace current contents with content from filename(s)/list

        :param filenames: Same as for ``SafeConfigParser.read`` method
        :return: List of successfully parsed filenames.
        """
        result = self._scp.read(filenames)  # Changes self._scp
        self._prune_sections()
        return result

    # Short name 'fp' mirrors use in ConfigParser module
    def readfp(self, fp, filename=None):  # pylint: disable=C0103
        """
        Replace current contents with content from file

        :param fp: Same as for ``SafeConfigParser.readfp`` method
        :param filename: Same as for ``SafeConfigParser.readfp`` method
        :return: Same as for ``SafeConfigParser.readfp`` method
        """
        result = self._scp.readfp(fp, filename)  # Changes self._scp
        self._prune_sections()
        return result

    def get(self, option):
        """
        Return value assigned to key named ``option``

        :param option: Name of the ``option`` (key) to check.
        :returns: The value assigned to ``option``
        """
        return self._scp.get(self._section, option)

    def getint(self, option):
        """
        Convert/Return value assigned to key named ``option``

        :param option: Name of the ``option`` (key) to check.
        :return: Value assigned to ``option`` converted to an integer.
        """
        return self._scp.getint(self._section, option)

    def getfloat(self, option):
        """
        Convert/Return value assigned to key named ``option``

        :param option: Name of the ``option`` (key) to check.
        :return: Value assigned to ``option`` converted to a float.
        """
        return self._scp.getfloat(self._section, option)

    def getboolean(self, option):
        """
        Convert/Return value assigned to key named ``option``

        :param option: Name of the ``option`` (key) to check.
        :return: ``True``: if value is ``yes``, ``true``. ``False`` if
                 ``no`` or ``false``.
        """
        try:
            value = self._scp.get(self._section, option).lower().strip()
            positives = ("yes", "true")
            negatives = ("no", "false")
            if value in positives:
                return True
            if value in negatives:
                return False
            # try regular way
        except AttributeError:
            pass  # try regular way
        return self._scp.getboolean(self._section, option)

    def set(self, option, value):
        """
        Set value assigned to key named ``option``

        :param option: Name of the ``option`` (key) to set.
        :param value: Content to assign to ``option``.
        :return: Same as for ``SafeConfigParser.set`` method.
        """
        return self._scp.set(self._section, option, str(value))

    def write(self, fileobject):
        """
        Overwrite current contents of ``fileobject.name``
        """
        # BUGFIX: close the handle we open (the original leaked it and
        # relied on the GC to flush the file).
        out = open(fileobject.name, "wb")
        try:
            return self._scp.write(out)
        finally:
            out.close()

    def merge_write(self, fileobject):
        """
        Update section contents of ``fileobject.name`` by section only.
        """
        scp = SafeConfigParser()
        # Safe if file doesn't exist
        scp.read(fileobject.name)
        # N/B: This won't work with DEFAULTS
        if not scp.has_section(self._section):
            scp.add_section(self._section)
        for key, value in self.items():
            scp.set(self._section, key, value)
        # BUGFIX: use a context manager so the handle is flushed/closed
        # deterministically ("w+b" truncates the file first).
        with open(fileobject.name, "w+b") as out:
            scp.write(out)

    def remove_option(self, option):
        """
        Remove option-key ``option``
        """
        return self._scp.remove_option(self._section, option)

    def remove_section(self):
        """
        Not implemented, do not use!

        :raises NotImplementedError: DO NOT USE!
        """
        raise NotImplementedError()

    def items(self):
        """
        Return list of ``key``/``value`` tuples for contents
        """
        return self._scp.items(self._section)
GITHUB_USERNAME = os.environ['GITHUB_USERNAME'] GITHUB_PASSWORD = os.environ['GITHUB_PASSWORD'] repo_path = sys.argv[1] region = sys.argv[2] credential_file = sys.argv[3] hosts_file = repo_path + '/hosts.ini' region_setion = str(region) + ':children' parser = SafeConfigParser(allow_no_value=True) parser.optionxform = str parser.read(hosts_file) file_hosts_dict = {} for i in parser.items(region_setion): if 'stateful' not in i[0]: tenant_name = i[0] new_tenant_ip_list = [] tenant_ip_list = parser.items(tenant_name) for j in tenant_ip_list: new_tenant_ip_list.append(j[0]) file_hosts_dict[tenant_name] = new_tenant_ip_list new_host_dict = {} client = boto3.client('ec2', region_name=region, aws_access_key_id=ACCESS_KEY, aws_secret_access_key=SECRET_KEY) for i in file_hosts_dict.keys(): real_ip_list = []
class Summary:
    """Handles battery summary data.

    Keeps an in-memory ``self.summary`` dict (section -> option -> value)
    mirroring an on-disk ConfigParser "summary" file, updates the rolling
    'current'/'hour'/'currentday'/'monthtodate'/'yeartodate' sections from
    live battery samples, and appends realtime lines to a log file.

    Relies on module-level names defined elsewhere in this file:
    ``config`` (settings dict), ``numcells``, ``time``, ``sys``,
    ``deepcopy``, ``literal_eval``, ``filecopy``, ``rename``.
    """

    def __init__(self):
        self.currenttime = time.localtime()
        printtime = time.strftime("%Y%m%d%H%M%S ", self.currenttime)
        # Unbuffered append (third arg 0) — Python 2 only for text mode.
        self.logfile = open(config['files']['logfile'], 'a', 0)
        self.sampletime = time.time()
        self.prevtime = time.localtime()
        try:
            # Load the persisted summary and literal_eval every value back
            # into Python objects (lists/numbers stored as their repr).
            self.summaryfile = SafeConfigParser()
            self.summaryfile.read(config['files']['summaryfile'])
            self.summary = {}
            for section in self.summaryfile.sections():
                self.summary[section] = {}
                for key, val in self.summaryfile.items(section):
                    self.summary[section][key] = literal_eval(val)
#            daysummaryfile = open('/media/75cc9171-4331-4f88-ac3f-0278d132fae9/daysummary','r')
#            self.daydata = literal_eval(daysummaryfile.read())
#            daysummaryfile.close()
        except IOError:
            # NOTE(review): if the summary file is missing, self.summary is
            # never created and the lookups below raise AttributeError —
            # confirm a pre-existing file is a hard requirement.
            pass
#        summary = open('/media/75cc9171-4331-4f88-ac3f-0278d132fae9/summary','w')
#        pickle.dump(hivolts, summary)
#        pickle.dump(lowvolts, summary)
#        summary.close()
        # Roll any period section whose timestamp prefix (YYYYMMDDHHMMSS)
        # no longer matches "now" at the appropriate granularity.
        if self.summary['hour']['timestamp'][0:10] != printtime[0:10]:
            self.summary['hour'] = deepcopy(self.summary['current'])
        if self.summary['currentday']['timestamp'][0:8] != printtime[0:8]:
            self.summary['currentday'] = deepcopy(self.summary['current'])
        if self.summary['monthtodate']['timestamp'][0:6] != printtime[0:6]:
            self.summary['monthtodate'] = deepcopy(self.summary['current'])
        if self.summary['yeartodate']['timestamp'][0:4] != printtime[0:4]:
            self.summary['yeartodate'] = deepcopy(self.summary['current'])

    def update(self, summary, batdata):
        """ Update 'current' section of summary data with 'batdata' and write
        realtime log

        :param summary: summary dict (normally ``self.summary``), mutated
        :param batdata: live battery sample object (averaged volts/currents,
                        state of charge, power totals)
        """
        # Index [numcells] of the per-cell arrays holds the whole-pack value.
        summary['current']['maxvoltages'][numcells] = round(
            batdata.batvoltsav[numcells], 2)
        summary['current']['minvoltages'][numcells] = summary['current'][
            'maxvoltages'][numcells]
        if batdata.batcurrentav > -config['battery']['ilowcurrent']:
            summary['current']['maxnocharge'][numcells] = summary['current'][
                'maxvoltages'][numcells]
        if batdata.batcurrentav < config['battery']['ilowcurrent']:
            summary['current']['minnoload'][numcells] = summary['current'][
                'minvoltages'][numcells]
        # ah list layout (from usage here): [0]=min, [1]=avg, [2]=max,
        # [3]=sample count, [4]=discharge Ah, [5]=charge Ah, [6]=input Ah.
        summary['current']['ah'][2] = round(batdata.soc, 2)
        summary['current']['ah'][0] = summary['current']['ah'][2]
        summary['current']['ah'][1] = summary['current']['ah'][2]
        summary['current']['ah'][6] = round(batdata.inahtot, 2)
        # current from solar etc
        summary['current']['dod'][2] = round(batdata.socadj, 2)
        summary['current']['dod'][0] = summary['current']['dod'][2]
        summary['current']['dod'][1] = summary['current']['dod'][2]
        summary['current']['amps'][1] = round(batdata.batcurrentav, 1)
        summary['current']['amps'][0] = summary['current']['amps'][1]
        summary['current']['amps'][2] = round(batdata.incurrentav, 1)
        # Split battery Ah / power into charge vs discharge buckets.
        if batdata.ah > 0.0:
            summary['current']['ah'][5] = round(batdata.ah, 2)
            summary['current']['ah'][4] = 0.0
        else:
            summary['current']['ah'][4] = round(batdata.ah, 2)
            summary['current']['ah'][5] = 0.0
        if batdata.pwrbat > 0.0:
            summary['current']['power'][1] = round(batdata.pwrbattot, 6)
            summary['current']['power'][0] = 0.0
        else:
            summary['current']['power'][0] = round(batdata.pwrbattot, 6)
            summary['current']['power'][1] = 0.0
        summary['current']['power'][2] = round(batdata.pwrintot, 6)
        summary['current']['power'][3] = round(summary['current']['power'][0] - \
                                               summary['current']['power'][2] + \
                                               summary['current']['power'][1], 6)
        # current to loads
        vprint = ''
        maxmaxvoltage = 0.0
        minmaxvoltage = 5.0
        # Per-cell pass: deltav[i+1] holds cell i's voltage delta.
        for i in range(numcells):
            summary['current']['maxvoltages'][i] = round(
                batdata.deltav[i + 1], 3)
            maxmaxvoltage = max(maxmaxvoltage,
                                summary['current']['maxvoltages'][i])
            minmaxvoltage = min(minmaxvoltage,
                                summary['current']['maxvoltages'][i])
            summary['current']['minvoltages'][i] = summary['current'][
                'maxvoltages'][i]
            if batdata.batcurrentav > -config['battery']['ilowcurrent']:
                summary['current']['maxnocharge'][i] = summary['current'][
                    'maxvoltages'][i]
            if batdata.batcurrentav < config['battery']['ilowcurrent']:
                summary['current']['minnoload'][i] = summary['current'][
                    'minvoltages'][i]
            vprint = vprint + str(round(batdata.deltav[i + 1], 3)).ljust(
                5, '0') + ' '
        # Spread between highest and lowest cell voltage this sample.
        summary['current']['deltav'][0] = round(maxmaxvoltage - minmaxvoltage,
                                                3)
        if batdata.batcurrentav < config['battery']['ilowcurrent']:
            summary['current']['deltav'][1] = summary['current']['deltav'][0]
            summary['current']['deltav'][2] = summary['current']['deltav'][0]
        vprint = vprint + str(round(batdata.batvoltsav[numcells], 2)).ljust(
            5, '0') + ' '
        vprint = vprint + str(summary['current']['deltav'][0]) + ' '
        logdata = vprint + str(round(batdata.batcurrentav,1)) + \
            ' ' + str(round(batdata.incurrentav,1)) + \
            ' ' + str(round(batdata.soc,2)).ljust(5,'0') + \
            ' ' + str(round(batdata.socadj,2)).ljust(5,'0') + '\n'  # + '\033[1A'
        sys.stdout.write(logdata)  # + '\033[1A'
        self.prevtime = self.currenttime
        self.currenttime = time.localtime()
        self.printtime = time.strftime("%Y%m%d%H%M%S ", self.currenttime)
        # Timestamp is stored quoted so literal_eval round-trips it.
        summary['current']['timestamp'] = "'" + self.printtime + "'"
        currentdata = self.printtime + logdata
#        currentdata = currentdata + '  '
#        for i in range(numcells):
#            currentdata = currentdata + str(round(batdata.uncalvolts[i+1]-batdata.uncalvolts[i],3)) + ' '
#        currentdata = currentdata + '\n'
        self.logfile.write(currentdata)

    def updatesection(self, summary, section, source):
        """ Update 'summary' section 'section' with data from 'source'

        Accumulates min/max/averages from ``source`` into ``section``;
        ``ah[3]`` is the running sample count used for the averages.
        """
        section = summary[section]
        source = summary[source]
        section['deltav'][1] = max(section['deltav'][1], source['deltav'][1])
        section['deltav'][2] = max(section['deltav'][2], source['deltav'][2])
        section['deltav'][0] = min(section['deltav'][0], source['deltav'][0])
        section['ah'][2] = max(section['ah'][2], source['ah'][2])
        section['ah'][0] = min(section['ah'][0], source['ah'][0])
        # Un-normalize the running average, add the new sample; it is
        # re-divided by the incremented count below.
        section['ah'][1] = (section['ah'][1] * section['ah'][3] +
                            source['ah'][1])
        section['ah'][4] = round(section['ah'][4] + source['ah'][4], 2)
        section['ah'][5] = round(section['ah'][5] + source['ah'][5], 2)
        section['ah'][6] = round(section['ah'][6] + source['ah'][6], 2)
        section['power'][0] = round(section['power'][0] + source['power'][0],
                                    6)
        section['power'][1] = round(section['power'][1] + source['power'][1],
                                    6)
        section['power'][2] = round(section['power'][2] + source['power'][2],
                                    6)
        section['power'][3] = round(section['power'][3] + source['power'][3],
                                    6)
        section['dod'][2] = max(section['dod'][2], source['dod'][2])
        section['dod'][0] = min(section['dod'][0], source['dod'][0])
        section['dod'][1] = (section['dod'][1] * section['ah'][3] +
                             source['dod'][1])
        section['ah'][3] += 1
        section['ah'][1] = round(section['ah'][1] / section['ah'][3], 6)
        section['dod'][1] = round(section['dod'][1] / section['ah'][3], 6)
        section['dod'][3] = max(section['dod'][3], source['dod'][3])
        section['amps'][1] = max(section['amps'][1], source['amps'][1])
        section['amps'][0] = min(section['amps'][0], source['amps'][0])
        section['amps'][2] = min(section['amps'][2], source['amps'][2])
        # numcells + 1: per-cell slots plus the whole-pack slot.
        for i in range(numcells + 1):
            section['maxvoltages'][i] = max(section['maxvoltages'][i],
                                            source['maxvoltages'][i])
            section['minvoltages'][i] = min(section['minvoltages'][i],
                                            source['minvoltages'][i])
            section['maxnocharge'][i] = max(section['maxnocharge'][i],
                                            source['maxnocharge'][i])
            section['minnoload'][i] = min(section['minnoload'][i],
                                          source['minnoload'][i])
        section['timestamp'] = summary['current']['timestamp']

    def writesummary(self):
        """ Write summary file """
        # Push every in-memory value back into the parser as its repr,
        # then overwrite the summary file.
        for section in self.summaryfile.sections():
            for option in self.summaryfile.options(section):
                self.summaryfile.set(section, option,
                                     str(self.summary[section][option]))
        of = open(config['files']['summaryfile'], 'w')
        self.summaryfile.write(of)
        of.close()

#    def writehour(self, data):
#        hoursummaryfile=open('/media/75cc9171-4331-4f88-ac3f-0278d132fae9/hoursummary','a')
#        hoursummaryfile.write(data)
#        hoursummaryfile.close()

#        logsummary.set('alltime', 'maxvoltages') = round(max(literal_eval(logsummary.get('currentday','maxvoltages')),literal_eval(logsummary.get(),2)
#        logsummary.set('alltime', 'minvoltages') = round(min(literal_eval(logsummary.get('currentday','minvoltages')),batdata.batvoltsav[8]),2)
#        logsummary.set('alltime', 'ah') = round(max(literal_eval(logsummary.get('currentday','ah'))[1], batdata.soc/1000),2)
#        logsummary.set('alltime', 'ah') = round(min(literal_eval(logsummary.get('currentday','ah'))[0], batdata.soc/1000),2)
#        logsummary.set('alltime', 'current') = round(max(literal_eval(logsummary.get('alltime','current'))[1], batdata.batcurrentav/1000),2)
#        logsummary.set('alltime', 'current') = round(min(literal_eval(logsummary.get('alltime','current'))[0], batdata.batcurrentav/1000),2)

    def writeperiod(self, file, data):
        """ Append 'data' to 'file' for previous period

        :param file: key into ``config['files']`` naming the target file
        :param data: section name in the summary file to dump
        """
        periodfile = open(config['files'][file], 'a')
        writestr = ''
        # NOTE(review): dumps the values last written to self.summaryfile,
        # not self.summary — correct only if writesummary() ran first.
        y = self.summaryfile.items(data)
        for i in y:
            writestr = writestr + str(i) + "\n"
        writestr = writestr + "\n"
        periodfile.write(writestr)
        periodfile.close()

    def starthour(self, summary):
        """ Start new hour """
        self.writeperiod('hoursummaryfile', 'hour')
        # NOTE(review): this zeroing is immediately discarded by the
        # deepcopy on the next line — confirm whether the intent was to
        # zero the count *after* copying (same pattern in startday etc.).
        summary['hour']['ah'][3] = 0  # zero # of samples for av
        summary['hour'] = deepcopy(summary['current'])

    def startday(self, summary):
        """ Start new Day """
        self.writeperiod('daysummaryfile', 'currentday')
        summary['prevday'] = deepcopy(summary['currentday'])
        summary['currentday']['ah'][3] = 0  # zero number of samples for av
        summary['current']['dod'][3] += 1
        summary['currentday'] = deepcopy(summary['current'])

    def startmonth(self, summary):
        """ Start new month """
        self.writeperiod('monthsummaryfile', 'monthtodate')
        summary['monthtodate']['ah'][3] = 0  # zero number of samples for av
        summary['monthtodate'] = deepcopy(summary['current'])
        # Keep a dated snapshot of the summary file for the month just ended.
        filecopy(config['files']['summaryfile'],
                 config['files']['summaryfile'] + self.printtime[0:8])

    def startyear(self, summary):
        """ Start new year """
        self.writeperiod('yearsummaryfile', 'yeartodate')
        summary['yeartodate']['ah'][3] = 0  # zero number of samples for av
        summary['yeartodate'] = deepcopy(summary['current'])
        # Rotate the realtime log: close, rename with the year just ended,
        # then reopen a fresh log.
        self.logfile.close()
        rename(config['files']['logfile'],
               config['files']['logfile'] + str(int(self.printtime[0:4]) - 1))
        self.logfile = open(config['files']['logfile'], 'a')

    def close(self):
        """ Close logging file ready for exit """
        self.logfile.close()
#------- ApprovalProcessorGCN name = 'approval processor gcn' tests.append( (name, alert) ) ''' #------------------------------------------------- ### actually test the items for name, alert in tests: logger.info("TESTING: %s" % name) ### instantiate the item item = es.qid[name](alert, time.time(), dict(config.items(name)), gdb, annotate=opts.annotate, warnings=opts.warnings, logDir=opts.logDir, logTag=opts.logTag) # raise NotImplementedError('check object internals') execute(item, verbose=opts.verbose, Verbose=opts.Verbose, cadence=opts.cadence) # raise NotImplementedError('check object internals') logger.info("TESTING: %s complete" % name)
print "Running PyFiscalPrinter service." mainloop.run() else: from ConfigParser import SafeConfigParser DEBUG = '--debug' in sys.argv # leeo configuración (primer argumento o rece.ini por defecto) if len(sys.argv)>1 and not sys.argv[1].startswith("--"): CONFIG_FILE = sys.argv.pop(1) if DEBUG: print "CONFIG_FILE:", CONFIG_FILE config = SafeConfigParser() config.read(CONFIG_FILE) if config.has_section('CONTROLADOR'): conf = dict(config.items('CONTROLADOR')) else: conf = {} if '--ayuda' in sys.argv: print AYUDA sys.exit(0) if '--licencia' in sys.argv: print LICENCIA sys.exit(0) controlador = PyFiscalPrinter() controlador.LanzarExcepciones = True marca = conf.get("marca", "epson")
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key # Note: This key only used for development and testing. SECRET_KEY = secrets.get('secrets', 'SECRET_KEY') ########## END SECRET CONFIGURATION ########## DEBUG CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = config.getboolean('debug', 'DEBUG') # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug TEMPLATE_DEBUG = config.getboolean('debug', 'TEMPLATE_DEBUG') ########## END DEBUG CONFIGURATION ########## MANAGER CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#admins ADMINS = tuple(config.items('error mail')) # See: https://docs.djangoproject.com/en/dev/ref/settings/#managers MANAGERS = tuple(config.items('404 mail')) ########## END MANAGER CONFIGURATION ########## SESSION COOKIE CONFIGURATION SESSION_COOKIE_DOMAIN = config.get('cookies', 'SESSION_COOKIE_DOMAIN') ########## END SESSION COOKIE CONFIGURATION ########## TEST RUNNER CONFIG TEST_RUNNER = 'django.test.runner.DiscoverRunner' ########## END TEST RUNNER CONFIG ########## DATABASE CONFIGURATION DATABASE_USER = config.get('database', 'DATABASE_USER')
def generar_pdf():
    """Render an AFIP invoice stored in the database as a PDF.

    web2py controller: reads the invoice header from ``db.comprobante_afip``
    (row id taken from ``request.args[0]``) and its line items from
    ``db.detalle_afip``, feeds them to pyafipws' FEPDF template engine and
    returns the generated PDF stream with the proper Content-Type.
    Layout/format options come from the FACTURA and PDF sections of rece1.ini.
    """
    CONFIG_FILE = "/home/web2py/pyafipws/sp/rece1.ini"
    config = SafeConfigParser()
    config.read(CONFIG_FILE)
    conf_fact = dict(config.items('FACTURA'))
    conf_pdf = dict(config.items('PDF'))
    from pyafipws.pyfepdf import FEPDF
    fepdf = FEPDF()
    # load the default CSV layout (factura.csv)
    fepdf.CargarFormato(conf_fact.get("formato", "factura.csv"))
    # set numeric formats (decimal places) from configuration:
    fepdf.FmtCantidad = conf_fact.get("fmt_cantidad", "0.2")
    fepdf.FmtPrecio = conf_fact.get("fmt_precio", "0.2")
    # build a sample invoice
    # NOTE(review): identifier masked in source; in pyafipws this flag is
    # the homologation/test-mode switch — confirm.
    H**O = True
    # fetch the voucher's general record (header and totals)
    id_comprobante = int(request.args[0])
    reg = db(db.comprobante_afip.id==id_comprobante).select().first()
    tipo_cbte = reg.tipo_cbte
    punto_vta = reg.punto_vta
    cbte_nro = reg.cbte_nro
    fecha = reg.fecha_cbte  #.strftime("%Y%m%d")  # AAAAMMDD format
    concepto = reg.concepto
    tipo_doc = reg.tipo_doc  # 80: CUIT, 96: DNI
    nro_doc = reg.nro_doc.replace("-", "")  # customer's, without the dash
    cbt_desde = cbte_nro; cbt_hasta = cbte_nro
    imp_total = reg.imp_total
    imp_tot_conc = reg.imp_tot_conc
    imp_neto = reg.imp_neto
    imp_iva = reg.impto_liq
    imp_trib = "0.00"
    imp_op_ex = reg.imp_op_ex
    fecha_cbte = fecha
    # Billed-service period dates and payment due date (concepto > 1 means
    # services are involved):
    if concepto > 1:
        fecha_venc_pago = reg.fecha_venc_pago
        fecha_serv_desde = reg.fecha_serv_desde
        # NOTE(review): reads reg.fecha_serv_desde again — looks like a
        # copy-paste bug; should probably be reg.fecha_serv_hasta. Confirm.
        fecha_serv_hasta = reg.fecha_serv_desde
    else:
        fecha_venc_pago = fecha_serv_desde = fecha_serv_hasta = None
    moneda_id = reg.moneda_id
    moneda_ctz = reg.moneda_ctz
    # general header data:
    incoterms = 'FOB'  # exports only
    idioma_cbte = 1  # 1: es, 2: en, 3: pt
    # additional header data:
    nombre_cliente = reg.nombre_cliente
    domicilio_cliente = reg.domicilio_cliente
    pais_dst_cmp = 212  # 200: Argentina, see AFIP table
    id_impositivo = reg.id_impositivo  # VAT category (domestic market)
    forma_pago = reg.forma_pago
    obs_generales = reg.obs
    obs_comerciales = reg.obs_comerciales
    # data returned by the webservice (WSFEv1, WSMTXCA, etc.):
    motivo_obs = "Factura individual, DocTipo: 80, DocNro 30000000007 no se encuentra registrado en los padrones de AFIP."
    cae = reg.cae
    fch_venc_cae = reg.fecha_vto
    fepdf.CrearFactura(concepto, tipo_doc, nro_doc, tipo_cbte, punto_vta,
                       cbte_nro, imp_total, imp_tot_conc, imp_neto, imp_iva,
                       imp_trib, imp_op_ex, fecha_cbte, fecha_venc_pago,
                       fecha_serv_desde, fecha_serv_hasta, moneda_id,
                       moneda_ctz, cae, fch_venc_cae, id_impositivo,
                       nombre_cliente, domicilio_cliente, pais_dst_cmp,
                       obs_comerciales, obs_generales, forma_pago, incoterms,
                       idioma_cbte, motivo_obs)
    # fill extra header fields:
    ok = fepdf.EstablecerParametro("localidad_cliente", "Hurlingham")
    ok = fepdf.EstablecerParametro("provincia_cliente", "Buenos Aires")
    # print the "Comprobante Autorizado" legend (verify with WSCDC!)
    ok = fepdf.EstablecerParametro("resultado", "A")
    # additional taxes:
    tributo_id = 99
    desc = 'Impuesto Municipal Matanza'
    base_imp = "100.00"
    alic = "1.00"
    importe = "1.00"
    fepdf.AgregarTributo(tributo_id, desc, base_imp, alic, importe)
    tributo_id = 4
    desc = 'Impuestos Internos'
    base_imp = None
    alic = None
    importe = "0.00"
    fepdf.AgregarTributo(tributo_id, desc, base_imp, alic, importe)
    # subtotals per VAT rate:
    iva_id = 5  # 21%
    base_imp = 100
    importe = 21
    fepdf.AgregarIva(iva_id, base_imp, importe)
    # line-item detail:
    registros = db(db.detalle_afip.comprobante_id==id_comprobante).select()
    for registro in registros:
        u_mtx = 123456
        cod_mtx = 1234567890123
        codigo = registro.codigo
        ds = registro.ds
        qty = registro.qty
        umed = 7
        precio = registro.precio
        imp_iva = registro.imp_iva
        bonif = 0.00
        iva_id = registro.iva_id
        importe = registro.imp_total
        despacho = u''
        dato_a = ""
        fepdf.AgregarDetalleItem(u_mtx, cod_mtx, codigo, ds, qty, umed,
                                 precio, bonif, iva_id, imp_iva, importe,
                                 despacho, dato_a)
    # general discount (at the 21% rate):
    u_mtx = cod_mtx = codigo = None
    ds = u"Bonificación/Descuento 10%"
    qty = precio = bonif = None
    umed = 99
    iva_id = 5
    if tipo_cbte in (1, 2, 3, 4, 5, 34, 39, 51, 52, 53, 54, 60, 64):
        # itemize VAT when the voucher is class A / M
        imp_iva = -2.21
    else:
        imp_iva = None
    importe = -12.10
    fepdf.AgregarDetalleItem(u_mtx, cod_mtx, codigo, ds, qty, umed, precio,
                             bonif, iva_id, imp_iva, importe, "")
    # description-only line (no amounts or quantity):
    u_mtx = cod_mtx = codigo = None
    qty = precio = bonif = iva_id = imp_iva = importe = None
    umed = 0
    ds = u"Descripción Ejemplo"
    fepdf.AgregarDetalleItem(u_mtx, cod_mtx, codigo, ds, qty, umed, precio,
                             bonif, iva_id, imp_iva, importe, "")
    # fill custom fields of the template:
    fepdf.AgregarDato("custom-nro-cli", "Cod.123")
    fepdf.AgregarDato("custom-pedido", "1234")
    fepdf.AgregarDato("custom-remito", "12345")
    fepdf.AgregarDato("custom-transporte", "Camiones Ej.")
    # fixed data from the PDF config section:
    for k, v in conf_pdf.items():
        fepdf.AgregarDato(k, v)
        if k.upper() == 'CUIT':
            fepdf.CUIT = v  # issuer's CUIT, used for the barcode
    fepdf.CrearPlantilla(papel=conf_fact.get("papel", "legal"),
                         orientacion=conf_fact.get("orientacion", "portrait"))
    fepdf.ProcesarPlantilla(num_copias=int(conf_fact.get("copias", 1)),
                            lineas_max=int(conf_fact.get("lineas_max", 24)),
                            qty_pos=conf_fact.get("cant_pos") or 'izq')
    salida = "/tmp/factura.pdf"
    fepdf.GenerarPDF(archivo=salida)
    fepdf.MostrarPDF(archivo=salida, imprimir=False)
    response.headers['Content-Type'] = "application/pdf"
    return open(salida, "rb")
# Instantiate the system-specific benchmark driver named on the command line.
driverClass = createDriverClass(args['system'])
assert driverClass != None, "Failed to find '%s' class" % args['system']
driver = driverClass(args['ddl'])
assert driver != None, "Failed to create '%s' driver" % args['system']
if args['print_config']:
    # Just dump the driver's default config template and exit.
    config = driver.makeDefaultConfig()
    print driver.formatConfig(config)
    print
    sys.exit(0)

## Load Configuration file
if args['config']:
    logging.debug("Loading configuration file '%s'" % args['config'])
    cparser = SafeConfigParser()
    # args['config'] is an open file object (argparse file type); read by path.
    cparser.read(os.path.realpath(args['config'].name))
    config = dict(cparser.items(args['system']))
else:
    logging.debug("Using default configuration for %s" % args['system'])
    # Default config values are stored as (description, value) pairs;
    # keep only the value.
    defaultConfig = driver.makeDefaultConfig()
    config = dict(
        map(lambda x: (x, defaultConfig[x][1]), defaultConfig.keys()))
config['reset'] = args['reset']
config['load'] = False
config['execute'] = False
if config['reset']:
    logging.info("Reseting database")
driver.loadConfig(config)
logging.info("Initializing TPC-C benchmark using %s" % driver)

## Create ScaleParameters
scaleParameters = scaleparameters.makeWithScaleFactor(
    args['warehouses'], args['scalefactor'])
def main():
    """Write the SFP data base and the init script of a WR device (WRCORE).

    Reads an .ini file with optional ``ports`` and ``init`` sections and
    pushes the corresponding ``sfp add`` / ``init add`` commands to the
    device over an Etherbone or serial bridge.

    Fixes vs. previous revision: removed unused locals (``sfp_db``,
    ``SFP_BLUE``, ``SFP_VIOLET``), bail out early when the .ini file cannot
    be read instead of silently continuing, and corrected the final
    status message ("writed" -> "written").
    """
    parser = arg.ArgumentParser(description='EEPROM writing tool for WRCORE')
    parser.add_argument('--bus', '-b', help='Bus',
                        choices=['ethbone', 'serial'], required=True)
    parser.add_argument('--lun', '-l', help='Logical Unit (IP/Serial Port)',
                        type=str, required=True)
    parser.add_argument('--input', '-i', help='Input .ini file', type=str,
                        required=True)
    parser.add_argument('--debug', '-d', help='Enable debug output',
                        action="store_true", default=False)
    args = parser.parse_args()

    if args.bus == 'ethbone':
        uart = VUART_bridge('eth', args.lun, args.debug)
    else:
        uart = SerialBridge(port="/dev/ttyUSB%s" % args.lun,
                            verbose=args.debug)
    # Hack for new releases of WRC-2P
    uart.open(interchartimeout=0.01)

    parser = SafeConfigParser()
    ret = parser.read(args.input)
    if not ret:  # read() returns the list of files successfully parsed
        print("%s could not be opened" % (args.input))
        return  # nothing to write without a configuration

    # Write the delays for the SFP ports
    if "ports" in parser.sections():
        print("Writing the port calibration values...")
        uart.sendCommand("sfp erase")
        time.sleep(0.5)  # For serial
        # Every read port is a combination of (SFP-SN@PORT,(tx,rx,alpha))
        for port in parser.items("ports"):
            sfpsn, p = port[0].split('@')
            sfpsn = sfpsn.upper()  # The parser reads the chars in lowercase
            dtx, drx, alpha = port[1].split(',')
            cmd = "sfp add %s %s %s %s %s" % (sfpsn, p, dtx, drx, alpha)
            uart.sendCommand(cmd)
            time.sleep(0.5)  # For serial

    # Write the init script
    if "init" in parser.sections():
        print("Writing init script...")
        uart.sendCommand("init erase")
        for item in parser.items("init"):
            time.sleep(0.5)  # For serial
            uart.sendCommand("init add %s" % item[1])

    print("Configuration written")
# Resolve webservice URLs: explicit config value only in production mode
# (identifier masked in source; presumably the homologation flag — confirm).
if config.has_option('WSAA','URL') and not H**O:
    WSAA_URL = config.get('WSAA','URL')
else:
    WSAA_URL = None  #wsaa.WSAAURL
if config.has_option('WSLTV','URL') and not H**O:
    WSLTV_URL = config.get('WSLTV','URL')
else:
    WSLTV_URL = WSDL
# Optional connection settings (None when absent).
PROXY = config.has_option('WSAA', 'PROXY') and config.get('WSAA', 'PROXY') or None
CACERT = config.has_option('WSAA', 'CACERT') and config.get('WSAA', 'CACERT') or None
WRAPPER = config.has_option('WSAA', 'WRAPPER') and config.get('WSAA', 'WRAPPER') or None

if config.has_section('DBF'):
    conf_dbf = dict(config.items('DBF'))
    # NOTE(review): DEBUG is read here but reassigned from sys.argv just
    # below — this branch uses the earlier value defined before this
    # fragment; confirm the ordering is intentional.
    if DEBUG: print "conf_dbf", conf_dbf
else:
    conf_dbf = {}

DEBUG = '--debug' in sys.argv
XML = '--xml' in sys.argv

if DEBUG:
    print "Usando Configuración:"
    print "WSAA_URL:", WSAA_URL
    print "WSLTV_URL:", WSLTV_URL
    print "CACERT", CACERT
    print "WRAPPER", WRAPPER

# obtain the access ticket (TA)
from wsaa import WSAA
class Config(object):
    """Layered configuration loaded from one or more INI files.

    Values are read raw (all strings), normalized (cast to bool/int/float
    where they look like one), and finally passed through optional
    per-section ``parse_<section>_section`` hooks into ``self.data``.
    """

    def __init__(self, *paths):
        """:param paths: file names handed to ``SafeConfigParser.read``."""
        self.parser = SafeConfigParser()

        # read the configuration, and store the list of
        # config files which were successfully parsed
        self.sources = self.parser.read(paths)
        self.raw_data = {}
        self.normalized_data = {}
        self.data = {}

        # first pass: read in the raw data. it's all strings, since
        # ConfigParser doesn't seem to decode unicode correctly (yet)
        for sn in self.parser.sections():
            items = self.parser.items(sn)
            self.raw_data[sn] = dict(items)

        # second pass: cast the values into int or bool where possible
        # (mostly to avoid storing "false", which evaluates to True)
        for sn in self.raw_data.keys():
            self.normalized_data[sn] = {}
            for key, val in self.raw_data[sn].items():
                self.normalized_data[sn][key] = \
                    self.__normalize_value(val)

        # third pass: iterate the normalized data, creating a
        # dict (self.data) containing the "real" configuration,
        # which may include things (magic, defaults, etc) not
        # present in the raw_data or normalized_data
        for sn in self.normalized_data.keys():
            section_parser = "parse_%s_section" % (sn)

            # if this section has a special parser, call
            # it with the raw data, and store the result
            if hasattr(self, section_parser):
                self.data[sn] = \
                    getattr(self, section_parser)(
                        self.normalized_data[sn])

            # no custom section parser, so
            # just copy the raw data as-is
            else:
                self.data[sn] = \
                    self.normalized_data[sn].copy()

    def __normalize_value(self, value):
        """Casts a string to a bool, int, or float, if it looks like it
        should be one. This is a band-aid over the ini format, which
        assumes all values to be strings. Examples:

        "mudkips"              => "mudkips" (str)
        "false", "FALSE", "no" => False (bool)
        "true", "TRUE", "yes"  => True (bool)
        "1.0", "0001.00"       => 1.0 (float)
        "0", "0000"            => 0 (int)"""

        # shortcut for string boolean values
        if value.lower() in ["false", "no"]:
            return False
        elif value.lower() in ["true", "yes"]:
            return True

        # attempt to cast this value to an int, then a float.
        # BUG FIX: the conversion result was previously discarded
        # (`func(value)` with no return), so numeric strings were
        # returned unchanged; return the converted value instead.
        for func in [int, float]:
            try:
                return func(value)
            except ValueError:  # not numeric in this form; try the next
                pass

        # it's just a str
        # (NOT A UNICODE)
        return value

    def __import_class(self, class_tmpl):
        """Given a full class name (ie, webapp.app.App), returns the
        class object. There doesn't seem to be a built-in way of doing
        this without mucking with __import__."""

        # break the class name off the end of module template
        # i.e. "ABCD.app.App" -> ("ABC.app", "App")
        try:
            split_module = class_tmpl.rsplit(".", 1)
            module = __import__(split_module[0], {}, {}, split_module[1:])

            # import the requested class or None
            if len(split_module) > 1 and hasattr(module, split_module[-1]):
                return getattr(module, split_module[-1])
            else:
                return module
        except ImportError as e:  # py2/py3-compatible spelling
            logging.error("App import error: " + str(e))
from ConfigParser import SafeConfigParser
import os
import json
import re

PLUGIN_DIR = '../lib/ar_server/plugins/'
OUT_JSON = 'ar_modules.json'

# Every yapsy descriptor file in the plugin directory.
plugin_configs = [fname for fname in os.listdir(PLUGIN_DIR)
                  if re.search('yapsy', fname)]

plugins_data = []
for fname in plugin_configs:
    cfg = SafeConfigParser()
    cfg.read(os.path.join(PLUGIN_DIR, fname))
    # Merge the descriptor sections into one flat record. Sections are
    # applied in order, so later sections win on duplicate keys — the
    # same collision behaviour as concatenating their items().
    record = {}
    record.update(dict(cfg.items('Core')))
    record.update(dict(cfg.items('Settings')))
    record['parameters'] = dict(cfg.items('Parameters')).items()
    record.update(dict(cfg.items('Documentation')))
    plugins_data.append(record)

# Dump all plugin records as a single JSON array.
with open(OUT_JSON, 'w') as outfile:
    json.dump(plugins_data, outfile)
help= "Pattern from which output file is created by interpolation with variable values." ) ns = parse(add_arguments, args=sys.argv[1:i]) prog = sys.argv[i] args = sys.argv[i + 1:] argvars = VarArgGenerator() if ns.var_file: config = SafeConfigParser() config.read(ns.var_file) if config.has_section('constants'): argvars.update(config.items('constants')) if config.has_section['variables']: argvars.update( dict(parse_vars(m) for m in config.items('variables'))) if ns.var: argvars.update(ns.var) if ns.glob: for name, globstr in ns.glob.iteritems(): argvars.variables[name] = glob(globstr) argvars.add_oper(name, "{0}_base".format(name), lambda x: os.path.basename(x)) argvars.add_oper(name, "{0}_noext".format(name), lambda x: os.path.splitext(x)[0])
class Session(object):
    """Configuration session.

    Encapsulates all configuration for a given test run.

    .. attribute :: argparse

       An instance of :class:`argparse.ArgumentParser`. Plugins can use this
       directly to add arguments and argument groups, but *must* do so in
       their ``__init__`` methods.

    .. attribute :: pluginargs

       The argparse argument group in which plugins (by default) place their
       command-line arguments. Plugins can use this directly to add
       arguments, but *must* do so in their ``__init__`` methods.

    .. attribute :: hooks

       The :class:`nose2.events.PluginInterface` instance contains all
       available plugin methods and hooks.

    .. attribute :: plugins

       The list of loaded -- but not necessarily *active* -- plugins.

    .. attribute :: verbosity

       Current verbosity level. Default: 1.

    .. attribute :: startDir

       Start directory of test run. Test discovery starts here. Default:
       current working directory.

    .. attribute :: topLevelDir

       Top-level directory of test run. This directory is added to sys.path.
       Default: starting directory.

    .. attribute :: libDirs

       Names of code directories, relative to starting directory. Default:
       ['lib', 'src']. These directories are added to sys.path and
       discovery if the exist.

    .. attribute :: testFilePattern

       Pattern used to discover test module files. Default: test*.py

    .. attribute :: testMethodPrefix

       Prefix used to discover test methods and functions: Default: 'test'.

    .. attribute :: unittest

       The config section for nose2 itself.

    """

    configClass = config.Config

    def __init__(self):
        self.argparse = argparse.ArgumentParser(prog="nose2", add_help=False)
        self.pluginargs = self.argparse.add_argument_group(
            "plugin arguments", "Command-line arguments added by plugins:"
        )
        self.config = ConfigParser()
        self.hooks = events.PluginInterface()
        self.plugins = []
        # this will be reset later, whenever handleCfgArgs happens, but it
        # starts at 1 so that it always has a non-negative integer value
        self.verbosity = 1
        self.startDir = None
        self.topLevelDir = None
        self.testResult = None
        self.testLoader = None
        self.logLevel = logging.WARN
        self.configCache = dict()

    def get(self, section):
        """Get a config section.

        :param section: The section name to retreive.
        :returns: instance of self.configClass.

        """
        # If section exists in cache, return cached version
        if section in self.configCache:
            return self.configCache[section]

        # If section doesn't exist in cache, parse config file
        # (and cache result)
        items = []
        if self.config.has_section(section):
            items = self.config.items(section)
        self.configCache[section] = self.configClass(items)
        return self.configCache[section]

    def loadConfigFiles(self, *filenames):
        """Load config files.

        :param filenames: Names of config files to load.

        Loads all names files that exist into ``self.config``.

        """
        self.config.read(filenames)

    def loadPlugins(self, modules=None, exclude=None):
        """Load plugins.

        :param modules: List of module names from which to load plugins.

        """
        # plugins set directly
        if modules is None:
            modules = []
        if exclude is None:
            exclude = []
        # plugins mentioned in config file(s)
        cfg = self.unittest
        more_plugins = cfg.as_list("plugins", [])
        cfg_exclude = cfg.as_list("exclude-plugins", [])
        exclude.extend(cfg_exclude)
        exclude = set(exclude)
        all_ = (set(modules) | set(more_plugins)) - exclude
        all_ = sorted(all_)
        log.debug("Loading plugin modules: %s", all_)
        for module in all_:
            self.loadPluginsFromModule(util.module_from_name(module))
        self.hooks.pluginsLoaded(events.PluginsLoadedEvent(self.plugins))

    def loadPluginsFromModule(self, module):
        """Load plugins from a module.

        :param module: A python module containing zero or more plugin
                       classes.

        """
        avail = []
        for entry in dir(module):
            try:
                item = getattr(module, entry)
            except AttributeError:
                # BUG FIX: previously `pass`, which fell through and
                # re-examined the stale `item` from the prior iteration;
                # skip unresolvable attributes entirely.
                continue
            try:
                if issubclass(item, events.Plugin) and not item == events.Plugin:
                    avail.append(item)
            except TypeError:
                # item was not a class; ignore
                pass
        for cls in avail:
            log.debug("Plugin is available: %s", cls)
            plugin = cls(session=self)
            if plugin not in self.plugins:
                self.plugins.append(plugin)
            for method in self.hooks.preRegistrationMethods:
                if hasattr(plugin, method):
                    self.hooks.register(method, plugin)

    def registerPlugin(self, plugin):
        """Register a plugin.

        :param plugin: A `nose2.events.Plugin` instance.

        Register the plugin with all methods it implements.

        """
        log.debug("Register active plugin %s", plugin)
        if plugin not in self.plugins:
            self.plugins.append(plugin)
        for method in self.hooks.methods:
            if hasattr(plugin, method):
                log.debug("Register method %s for plugin %s", method, plugin)
                self.hooks.register(method, plugin)

    def setVerbosity(self, args_verbosity, args_verbose, args_quiet):
        """
        Determine verbosity from various (possibly conflicting) sources of
        info

        :param args_verbosity: The --verbosity argument value
        :param args_verbose: count of -v options
        :param args_quiet: count of -q options

        start with config, override with any given --verbosity, then adjust
        up/down with -vvv -qq, etc
        """
        self.verbosity = self.unittest.as_int("verbosity", 1)
        if args_verbosity is not None:
            self.verbosity = args_verbosity
        # adjust up or down, depending on the difference of these counts
        self.verbosity += args_verbose - args_quiet
        # floor the value at 0 -- verbosity is always a non-negative integer
        self.verbosity = max(self.verbosity, 0)

    def setStartDir(self, args_start_dir=None):
        """
        start dir comes from config and may be overridden by an argument
        """
        self.startDir = self.unittest.as_str("start-dir", ".")
        if args_start_dir is not None:
            self.startDir = args_start_dir

    def prepareSysPath(self):
        """Add code directories to sys.path"""
        tld = self.topLevelDir
        sd = self.startDir
        if tld is None:
            tld = sd
        tld = os.path.abspath(tld)
        util.ensure_importable(tld)
        for libdir in self.libDirs:
            libdir = os.path.abspath(os.path.join(tld, libdir))
            if os.path.exists(libdir):
                util.ensure_importable(libdir)

    # convenience properties
    @property
    def libDirs(self):
        return self.unittest.as_list("code-directories", ["lib", "src"])

    @property
    def testFilePattern(self):
        return self.unittest.as_str("test-file-pattern", "test*.py")

    @property
    def testMethodPrefix(self):
        return self.unittest.as_str("test-method-prefix", "test")

    @property
    def unittest(self):
        return self.get("unittest")

    def isPluginLoaded(self, pluginName):
        """Returns ``True`` if a given plugin is loaded.

        :param pluginName: the name of the plugin module:
                           e.g. "nose2.plugins.layers".

        """
        for plugin in self.plugins:
            if pluginName == plugin.__class__.__module__:
                return True
        return False
cur.SAMObject.store(observed=cur.Y, inputs=cur.X, Q=Q, kernel=kernel, num_inducing=model_num_inducing) SAMCore.load_pruned_model(fname, economy_save, cur.SAMObject.model) mm.append(cur) #open ports yarp.Network.init() sect = splitPath[0].split('/')[-1].lower() parser2 = SafeConfigParser() parser2.read(interactionConfPath) portNameList = parser2.items(sect) print portNameList portsList = [] for j in range(len(portNameList)): if (portNameList[j][0] == 'rpcbase'): portsList.append(yarp.RpcServer()) portsList[j].open(portNameList[j][1] + ':i') svPort = j elif (portNameList[j][0] == 'callsign'): callSignList = portNameList[j][1].split(',') else: parts = portNameList[j][1].split(' ') print parts if (parts[1].lower() == 'imagergb'): portsList.append(yarp.BufferedPortImageRgb())
        def __init__(self, fp):
            # Wrap a raw file object so ConfigParser can read a
            # section-less bitcoin-style conf file.
            self.fp = fp
            self.sechead = '[all]\n'

        def readline(self):
            # First call returns the synthetic "[all]" section header,
            # after that delegate to the wrapped file.
            if self.sechead:
                try:
                    return self.sechead
                finally:
                    self.sechead = None
            else:
                s = self.fp.readline()
                # Strip trailing '#' comments.
                # NOTE(review): this also truncates values that legitimately
                # contain '#' (e.g. passwords) — confirm acceptable.
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() +"\n"
                return s

    # Parse the conf file under the synthetic [all] section and return it
    # as a plain dict (tail of the enclosing config-reading function).
    config_parser = SafeConfigParser()
    config_parser.readfp(
        FakeSecHead(open(os.path.join(dbdir, "unbreakablecoin.conf"))))
    return dict(config_parser.items("all"))

def connect_JSON(config):
    """Connect to a bitcoin JSON-RPC server"""
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0)  # 0/1 in config file, convert to True/False
    if not 'rpcport' in config:
        # Default RPC ports: testnet vs mainnet.
        config['rpcport'] = 19335 if testnet else 9335
    connect = "http://%s:%[email protected]:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the unbreakablecoind we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
from ConfigParser import SafeConfigParser
import sys

config_dir = './config.ini'
config = SafeConfigParser()
config.read(config_dir)

# ConfigParser stores every option value as a string, so set strings only.
config.add_section('tmp_section')
for option, value in (('name', 'wujimaster'), ('job', 'devops')):
    config.set('tmp_section', option, value)

# Dump every section (file-loaded plus the one added above) with its options.
for section in config.sections():
    print(section)
    for name, value in config.items(section):
        print(' %s = %r' % (name, value))