def load(self, filename=None):
    """Load a mirror ini config file into this object.

    :param filename: path of the config file; defaults to the instance's
        configured file (``self.__config_file``).
    """
    if not filename:
        filename = self.__config_file
    # Probe readability up front so we can warn-and-return instead of letting
    # ConfigParser fail later.  The original opened the file and read it into
    # an unused variable without ever closing the handle.
    try:
        with open(filename, "rb"):
            pass
    except IOError as e:
        log.warning("Unable to open config file %s: %s", filename, e)
        return
    # load mirror ini config file
    config = ConfigParser()
    config.read(filename)
    for section in config.sections():
        value = {}
        for key, val in config.items(section):
            if key.endswith("[]") and key not in value:
                # "name[]" options collect their values into a list.
                value[key] = [val]
                continue
            # isinstance replaces the non-idiomatic ``type(...) == list``.
            if key in value and isinstance(value[key], list):
                value[key].append(val)
            else:
                value[key] = val
        self.set_item(section, value)
def get_project_config():
    """Return the parsed ``platformio.ini`` of the current project.

    Raises ``exception.NotPlatformProject`` when the project has no
    ``platformio.ini`` file.
    """
    ini_path = join(get_project_dir(), "platformio.ini")
    if isfile(ini_path):
        parser = ConfigParser()
        parser.read(ini_path)
        return parser
    raise exception.NotPlatformProject(get_project_dir())
def run_it(self, ini_file, expected_outputs, clean_up=False):
    """Run the application and compare its outputs with the expected ones.

    Will clean up output files if clean_up is set to true.

    Parameters:
    - ini_file: configuration file to be passed into runapplication
    - expected_outputs: a dictionary of expected outputs
    - clean_up: if it should clean newly made files or not

    Throws: Assertion error if the files do not match.
    """
    config = ConfigParser()
    config.read(ini_file)
    # The application name is only needed to locate its log file afterwards.
    application = config['global_settings']['application']

    test_output = self._call_runapplication(expected_outputs.keys(),
                                            ini_file)

    # Diff every produced table against its expected counterpart.
    for table, expected in expected_outputs.items():
        test_list, expected_list = self._list_outputs(test_output[table],
                                                      expected)
        self._diff_checker(test_list, expected_list)

    if not clean_up:
        return
    # Remove the produced output files plus the newest matching log file.
    for produced in test_output.values():
        os.remove(produced)
    allFiles = [k for k in os.listdir()
                if application in k and '.log' in k]
    os.remove(max(allFiles, key=os.path.getctime))
def _loadConfig(args): import os config = None config_file = None if args.config: config_file = os.path.abspath(config_file) elif args.no_config is False: config_file = DEFAULT_CONFIG if not config_file: return None if os.path.isfile(config_file): try: config = ConfigParser() config.read(config_file) except ConfigParserError as ex: eyed3.log.warning("User config error: " + str(ex)) return None elif config_file != DEFAULT_CONFIG: raise IOError("User config not found: %s" % config_file) return config
class Config(object):
    """Flat attribute-style view over the merged configuration files."""

    _isInstantiated = False
    _root_mode = None

    def __init__(self):
        self._root_config = ConfigParser()
        self._root_config.read(get_config_files())
        # Flatten every section into a plain nested dict and install it as
        # the instance __dict__ so sections are reachable as attributes.
        sections = {}
        for section in self._root_config.sections():
            options = {}
            for option in self._root_config.options(section):
                options[option] = self._root_config.get(section, option)
            sections[section] = options
        self.__dict__ = sections

    def dict(self):
        """Return the flattened configuration dictionary."""
        return self.__dict__

    @staticmethod
    def write(path, section, key, value):
        """Persist a single key/value into *section* of the file at *path*."""
        parser = ConfigParser()
        parser.read(path)
        parser[section][key] = value
        with open(path, 'w') as configfile:
            parser.write(configfile)
def get_credentials():
    """Return the default credentials section, creating it when absent."""
    parser = ConfigParser()
    parser.read(DEFAULT_PATH)
    if DEFAULT_SECTION in parser:
        return parser[DEFAULT_SECTION]
    # No stored credentials yet: interactively create them.
    return create_credentials()[DEFAULT_SECTION]
def getRPiSettings(settingsLocation="RPi_settings.ini"):
    """Read the RPi settings ini file and return the values as a dict.

    Numeric, string and boolean options are converted to the matching
    Python types; ``counterMax`` (samples between saves) is derived from
    the intervals.

    :param settingsLocation: path of the ini file to read
    """
    settings = ConfigParser()
    settings.read(settingsLocation)
    settingsDict = {}
    settingsDict["piID"] = settings["RPi"]["piID"]
    # Distinguish between strings/numbers/boolean so we can convert them
    settingsToGrabNumbers = ["adxl_interval", "save_interval", "x_thresh",
                             "y_thresh", "z_thresh"]
    settingsToGrabStrings = ["up_orient", "east_orient", "north_orient"]
    settingsToGrabBoolean = ["checkForSignificance"]
    # Loop variable renamed from ``type`` to stop shadowing the builtin.
    for name in settingsToGrabStrings:
        settingsDict[name] = settings["Main"][name]
    for name in settingsToGrabNumbers:
        settingsDict[name] = float(settings["Main"][name])
    for name in settingsToGrabBoolean:
        settingsDict[name] = (settings["Main"][name] == "True")
    # Number of ADC samples between saves.
    settingsDict["counterMax"] = (settingsDict["save_interval"] * 60) / settingsDict["adxl_interval"]
    settingsDict["dataFolder"] = settings["Upload"]["dataFolder"]
    return settingsDict
def configure(self):
    """Read the copr-cli user config and the plugin's own config file.

    Sets ``self.copr_url`` from ~/.config/copr when present (warning on
    non-standard URLs) and ``self.chroot_config`` from the plugin conf
    file's [main] distribution/releasever options when that file exists.
    """
    raw_config = ConfigParser()
    filepath = os.path.join(os.path.expanduser("~"), ".config", "copr")
    if raw_config.read(filepath):
        # Python 2's ConfigParser lacks the mapping interface, hence the branch.
        if PY3:
            self.copr_url = raw_config["copr-cli"].get("copr_url", None)
        else:
            self.copr_url = raw_config.get("copr-cli", "copr_url", None)
        if self.copr_url != "https://copr.fedorainfracloud.org":
            print(_("Warning: we are using non-standard Copr URL '{}'.").format(self.copr_url))

    # Useful for forcing a distribution
    copr_plugin_config = ConfigParser()
    config_file = None
    # The last matching conf file on the plugin conf path wins.
    for path in self.base.conf.pluginconfpath:
        test_config_file = '{}/{}.conf'.format(path, PLUGIN_CONF)
        if os.path.isfile(test_config_file):
            config_file = test_config_file

    if config_file is not None:
        copr_plugin_config.read(config_file)
        # Both options must be present for a forced chroot to apply.
        if copr_plugin_config.has_option('main', 'distribution') and copr_plugin_config.has_option('main', 'releasever'):
            distribution = copr_plugin_config.get('main', 'distribution')
            releasever = copr_plugin_config.get('main', 'releasever')
            self.chroot_config = [distribution, releasever]
        else:
            self.chroot_config = [False, False]
        # NOTE(review): when no plugin conf file exists, chroot_config is not
        # assigned here — confirm a default is set elsewhere in the class.
def from_configparser(filepath):
    """Have an ini file that the python configparser can understand?
    Pass the filepath to this function, and a matching Configuration
    will magically be returned."""
    # Validate the path up front so we can log a precise error and return
    # None instead of raising.
    if not os.path.exists(filepath):
        logging.error(_('configuration file not found: %(filepath)s'),
                      {'filepath': filepath})
        return None
    if not os.path.isfile(filepath):
        logging.error(_('configuration path is not a file: %(filepath)s'),
                      {'filepath': filepath})
        return None
    try:
        from configparser import ConfigParser
    except ImportError:
        # Python 2 fallback bundled with the project.
        from backport.configparser import ConfigParser
    cfgp = ConfigParser()
    with open(filepath, encoding='utf-8') as fp:
        # NOTE(review): readfp() is deprecated in favour of read_file();
        # kept as-is, presumably for the backport parser — confirm.
        cfgp.readfp(fp)
    dic = OrderedDict()
    for section_name in cfgp.sections():
        # NOTE(review): ConfigParser.sections() never yields 'DEFAULT', so
        # this branch looks unreachable — confirm before relying on it.
        if 'DEFAULT' == section_name:
            section_name = ''
        for name, value in cfgp.items(section_name):
            value += ''  # inner workaround for python 2.6+
            # transforms ascii str to unicode because
            # of unicode_literals import
            dic[Key(section_name) + name] = value
    return Configuration.from_mapping(dic)
def parse(self, file: str) -> None:
    """
    Reads configuration file (config.ini) which contains all settings for
    application by different sections like: Storage, GPIO, etc.
    :param file:
    """
    parser = ConfigParser()
    parser.read(file)

    adc_channel = parser.get("ADC", "channel")
    adc_pins = self.__read_adc_pins(parser)
    self.__adc_config = ADCConfig(channel=int(adc_channel), pins=adc_pins)

    logger.info("ADC CHANNEL = {}".format(adc_channel))
    logger.info("ADC PINS SETUP:")
    logger.info(" MISO = {}".format(adc_pins.miso))
    logger.info(" MOSI = {}".format(adc_pins.mosi))
    logger.info(" CLK = {}".format(adc_pins.clk))
    logger.info(" CS = {}".format(adc_pins.cs))

    # self.__storage.interval = self.__config.get("Storage", "interval")
    # self.__storage.url = self.__config.get("Storage", "url")

    # validate all properties before actual collecting execution
    self.__validate()
def __send_mail(to, subject, body):
    """
    Sends an email to the operators.

    :param str|list[str] to: The email addresses of the operator(s).
    :param str subject: The subject of the email.
    :param str body: The email body.
    """
    config = ConfigParser()
    config.read(os.path.join(enarksh.HOME, 'etc/enarksh.cfg'))
    from_email = config.get('controller', 'email')

    # Join the To addresses into one comma-separated header value
    # (replaces the original manual concatenation loop).
    if isinstance(to, list):
        to_email = ', '.join(to)
    else:
        to_email = to

    msg = MIMEText(body)
    msg['Subject'] = subject
    msg['To'] = to_email
    msg['From'] = from_email

    # Send the message via our local SMTP server; quit() is now guaranteed
    # even when send_message() raises.
    s = smtplib.SMTP('localhost')
    try:
        s.send_message(msg)
    finally:
        s.quit()
def change_initial_setup_to_false():
    """Mark the initial setup as completed in the config file."""
    config = ConfigParser()
    config.read(conf_path)
    config['setup']['initial_setup'] = 'false'
    # ``with`` guarantees the handle is closed even if write() fails
    # (the original used a manual open/close pair).
    with open(conf_path, 'w') as f:
        config.write(f)
def run(self):
    """Print the sorted list of plugin command aliases from repl.ini."""
    print('Available commands:')
    parser = ConfigParser()
    parser.read('repl.ini', encoding='utf-8')
    aliases = sorted(parser['PluginsAliases'])
    print(', '.join(aliases))
def test_initialize():
    """End-to-end check of the setup flow: log in, save the configuration
    via /saveConfig, then verify it was written to the config file."""
    # Logging in before setup must redirect to the initialize page.
    res = client.post('/login', data={
        'username': '******',
        'password': '******'
    })
    eq_(302, res.status_code)
    eq_('http://localhost/initialize', res.headers['Location'])
    # Payload mirroring what the setup UI would submit.
    conf_dict = {'user_name': 'test-user',
                 'user_password': '******',
                 'mode': 'local',
                 'ldap_address': '',
                 'ldap_port': '',
                 'ldap_basedn': '',
                 'java_home': '/usr/lib/jvm/java-8-oracle',
                 'redpen_conf_path': os.path.expanduser(
                     '~/redpen/conf/redpen-conf-en.xml')}
    res = client.post('/saveConfig', data=json.dumps(conf_dict),
                      content_type='application/json')
    eq_(200, res.status_code)
    # The saved settings must be reflected in the config file on disk.
    config = ConfigParser()
    config.read(conf_path)
    eq_('false', config['setup']['initial_setup'])
    eq_('local', config['auth']['method'])
    eq_('', config['ldap']['server'])
    eq_('', config['ldap']['port'])
    eq_('', config['ldap']['base_dn'])
    eq_('/usr/lib/jvm/java-8-oracle', config['redpen']['java_home'])
    eq_(os.path.expanduser('~/redpen/conf/redpen-conf-en.xml'),
        config['redpen']['conf'])
def teardown():
    """Restore the config file, database user and session state after tests."""
    # Reset the config file back to its pristine pre-setup values.
    config = ConfigParser()
    config.read(conf_path)
    config['setup']['initial_setup'] = 'true'
    config['auth']['method'] = 'local'
    config['ldap']['server'] = ''
    config['ldap']['port'] = ''
    config['ldap']['base_dn'] = ''
    config['redpen']['java_home'] = ''
    config['redpen']['conf'] = ''
    config['dev']['check_csrf'] = 'true'
    f = open(conf_path, 'w')
    config.write(f)
    f.close()
    # Restore the default admin account that the tests renamed.
    con = sqlite3.connect(db_path)
    cur = con.cursor()
    sql = 'UPDATE user SET username=(?), password=(?) WHERE username=(?)'
    cur.execute(sql, ('Admin', gen_pass_hash('webtex'), 'test-user',))
    con.commit()
    cur.close()
    con.close()
    # After teardown every page must redirect back to the login screen.
    res = client.get('/')
    eq_(302, res.status_code)
    eq_('http://localhost/login', res.headers['Location'])
    res = client.get('/initialize')
    eq_(302, res.status_code)
    eq_('http://localhost/login', res.headers['Location'])
    res = client.get('/login')
    eq_(200, res.status_code)
class c2mon(object):
    """Application object: loads configuration and prepares the database."""

    def __init__(self, config_path='etc/c2mon.ini'):
        # Load configuration settings.
        self.config_path = config_path
        self.config = ConfigParser()
        self.config.read(self.config_path)
        self.db_path = self.config[CFG_SECTION_GLOBAL][CFG_OPTION_DB_PATH]
        # Initialise the database only when it does not exist yet.
        if os.path.exists(self.db_path):
            return
        logger.debug("creating database @ {}".format(self.db_path))
        with sqlite3.connect(self.db_path) as connection:
            cursor = connection.cursor()
            for sql in SQL_CREATE:
                cursor.execute(sql)
            connection.commit()

    def start(self):
        pass

    def stop(self):
        pass

    def run(self):
        pass

    def execute(self):
        pass
def _get_versions(self):
    """Parse versions.cfg under self.location and return its [versions] section."""
    logger.info("Parsing version information")
    config = ConfigParser(interpolation=ExtendedInterpolation())
    # Keep option names case sensitive.
    config.optionxform = str
    path = os.path.join(self.location, 'versions.cfg')
    # read_file + ``with`` replaces the deprecated readfp() and closes the
    # handle (the original leaked the open() result).
    with open(path) as fp:
        config.read_file(fp)
    return config['versions']
def _generate_timing_cfg(self, registrator, file_path):
    '''creates a raw timing ini config file

    Input:
        registrator  Registrator object corresponding to the mapping of
                     configurations loaded
        file_path    string path to the .ini file containing the configuration
    Output:
        -
    '''
    # extract types dicts
    cfg = ConfigParser()
    simp_t = registrator.reg_simple_timings
    db_vals = registrator.db_lookup_timings
    # Register all sections, then fill in the variants.
    self._add_sections(cfg, simp_t.keys())
    self._add_sections(cfg, db_vals.keys())
    self._add_simp_variants(cfg, simp_t)
    self._add_simp_variants(cfg, db_vals)
    # ``with`` ensures the file is closed even if writing raises (the
    # original opened the handle before building the config and leaked it
    # on any error along the way).
    with open(file_path, 'w') as cfgfile:
        cfg.write(cfgfile)
def read_config(sFname):
    """
    Reads config information specific to this computer.

    args:
        sFname : filename of the config file (looked up under config_path)

    returns:
        dir_dict : dictionary mapping 'root' and 'data' to their directories
                   (missing directories are created)

    (The original docstring promised three return values; only dir_dict
    is returned.)
    """
    from configparser import ConfigParser

    config = ConfigParser()
    path_config = os.path.join(config_path, sFname)
    if os.path.isfile(path_config):
        config.read(path_config)
    else:
        raise Exception('no config file at {}. Aborting.'.format(path_config))

    list_dirs = ['root', 'data']
    dir_dict = {}
    for d in list_dirs:
        dir_dict[d] = config.get('app_details', d)

    # Create any missing directory; exist_ok avoids the race between the
    # original exists() check and makedirs().
    for path in dir_dict.values():
        os.makedirs(path, exist_ok=True)

    return dir_dict
def parse_info(self, info_path):
    """
    When installing an agent, parse the information file and return a
    dictionary with the information. Fields missing from the file keep
    the value None.
    """
    info = ConfigParser()
    # The info file is a bare key/value list: prepend a dummy [info]
    # section header so ConfigParser accepts it.  ``with`` closes the
    # handle (the original leaked it) and the pointless
    # StringIO(...).read() round-trip is gone.
    with open(info_path) as fh:
        info.read_string("[info]\n%s" % fh.read())
    data = {
        "agent": None,
        "version": None,
        "license": None,
        "maintainer": None,
        "script": None,
        "topics": None,
        "description": None,
    }
    for key in info["info"]:
        data[key] = info["info"][key]
    return data
class API:
    """Thin wrapper around the Twitter API plus a Slack webhook, with all
    keys and ids read from keys.ini."""

    def __init__(self):
        self.keyf = 'keys.ini'
        self.config = ConfigParser()
        self.config.read(self.keyf)
        auth = [self.config['Key'][k] for k in ['CK', 'CS', 'AT', 'AS']]
        self.auth = OAuth1(*auth)
        self.url = 'https://api.twitter.com/1.1/'
        self.user_id = self.config['UserID']['ID']
        self.last_id = self.config['LastID']['ID']
        self.slack_url = self.config['Slack']['url']

    def get_tweets(self):
        """Fetch up to 200 tweets of the configured user newer than last_id."""
        url = self.url + 'statuses/user_timeline.json'
        params = {'user_id': self.user_id,
                  'since_id': self.last_id,
                  'include_rts': 'true',
                  'count': 200}
        response = requests.get(url, params=params, auth=self.auth)
        return response.json()

    def set_last_id(self, num):
        """Persist the id of the newest processed tweet back to keys.ini."""
        self.config['LastID']['ID'] = str(num)
        # ``with`` closes the handle; the original passed an unclosed
        # open() result straight into config.write() and leaked it.
        with open(self.keyf, 'w') as fh:
            self.config.write(fh)

    def post2slack(self, text, name, icon_url=None, icon_emoji=None):
        """Post *text* to Slack as *name*, with either an icon url or emoji."""
        if icon_url is not None:
            j = {'text': text, 'username': name, 'icon_url': icon_url}
        elif icon_emoji is not None:
            j = {'text': text, 'username': name, 'icon_emoji': icon_emoji}
        else:
            assert False, 'icon_url or icon_emoji is required'
        params = {'payload': json.dumps(j)}
        r = requests.post(self.slack_url, data=params)
def read(self, filename):
    """Reads the file specified by filename

    This method will load the eapi.conf file specified by filename into
    the instance object. It will also add the default connection localhost
    if it was not defined in the eapi.conf file

    Args:
        filename (str): The full path to the file to load
    """
    try:
        SafeConfigParser.read(self, filename)
    except SafeConfigParserError as exc:
        # Ignore file and syslog a message on SafeConfigParser errors
        syslog_warning("%s: parsing error in eapi conf file: %s" %
                       (type(exc).__name__, filename))

    self._add_default_connection()

    # Default each connection's host to the name suffix of its section
    # ("connection:<host>") when no explicit host option was given.
    for name in self.sections():
        if name.startswith('connection:') and \
           'host' not in dict(self.items(name)):
            self.set(name, 'host', name.split(':')[1])
    self.generate_tags()
def get_IPD_counter(config_file, lock_file, settings, log):
    """checks if IPD_counter is currently locked; if not, gets current counter;
    if yes, creates lock_file + returns counter + config-parser object
    (later needed to update the value)
    """
    log.debug("Getting current count of IPD submissions...")
    if os.path.isfile(lock_file):
        msg = "Another user is currently creating IPD files.\n"
        msg += "Please try again in a minute or so, to make sure you don't create files with the same IPD number."
        log.warning(msg)
        return False, msg

    if settings["modus"] == "productive":
        # Create the lockfile so concurrent runs back off.
        with open(lock_file, "w") as _:
            log.debug("Creating IPD counter lockfile under {}...".format(lock_file))
            os.utime(lock_file)

    cf = ConfigParser()
    cf.read(config_file)
    num = cf.get("Counter", "ipd_submissions")
    try:
        num = int(num)
    except Exception as E:
        log.error(E)
        log.exception(E)
        msg = "ipd_submissions counter must be an integer. '{}' is not!".format(num)
        os.remove(lock_file)
        return False, msg

    log.debug("\tCurrent count = {}".format(num))
    return True, (num, cf)
def __init__(self):
    """Load the Qiita configuration, falling back to the bundled test
    config when QIITA_CONFIG_FP is not set in the environment."""
    # If conf_fp is None, we default to the test configuration file
    try:
        conf_fp = environ['QIITA_CONFIG_FP']
    except KeyError:
        conf_fp = join(dirname(abspath(__file__)),
                       'support_files/config_test.cfg')
    self.conf_fp = conf_fp

    # Parse the configuration file
    config = ConfigParser()
    # NOTE(review): mode 'U' and readfp() are both deprecated — consider
    # open(conf_fp) + read_file() on modern Pythons.
    with open(conf_fp, 'U') as conf_file:
        config.readfp(conf_file)

    # Fail early with a clear error when a mandatory section is missing.
    _required_sections = {'main', 'redis', 'postgres', 'smtp', 'ebi',
                          'portal'}
    if not _required_sections.issubset(set(config.sections())):
        missing = _required_sections - set(config.sections())
        raise MissingConfigSection(', '.join(missing))

    # Delegate each section to its dedicated parser method.
    self._get_main(config)
    self._get_smtp(config)
    self._get_torque(config)
    self._get_postgres(config)
    self._get_redis(config)
    self._get_ebi(config)
    self._get_vamps(config)
    self._get_portal(config)
    self._iframe(config)
def parse_sentry_configuration(filename):
    """Parse Sentry DSN out of an application or Sentry configuration file"""
    filetype = os.path.splitext(filename)[-1][1:].lower()

    if filetype == 'ini':
        # Pyramid, Pylons
        config = ConfigParser()
        config.read(filename)
        ini_key = 'dsn'
        ini_sections = ['sentry', 'filter:raven']
        for section in ini_sections:
            if section not in config:
                continue
            print('- Using value from [{section}]:[{key}]'
                  .format(section=section, key=ini_key))
            if ini_key in config[section]:
                return config[section][ini_key]
            print('- Warning: Key "{key}" not found in section '
                  '[{section}]'.format(section=section, key=ini_key))
        raise SystemExit('No DSN found in {file}. Tried sections [{sec_list}]'
                         .format(
                             file=filename,
                             sec_list='], ['.join(ini_sections),
                         ))
    if filetype == 'py':
        # Django, Flask, Bottle, ...
        raise SystemExit('Parsing configuration from pure Python (Django,'
                         'Flask, Bottle, etc.) not implemented yet.')
    raise SystemExit('Configuration file type not supported for parsing: '
                     '%s' % filetype)
def parse_config_file(parser, stdin_args):
    """Parse config file.

    Returns a list of additional args.
    """
    config_args = []

    # Temporarily mark required args optional so a partial command line
    # (config file only) can still be parsed, then restore them.
    switched = [action for action in parser._actions if action.required]
    for action in switched:
        action.required = False
    parsed_args = parser.parse_args(stdin_args)
    for action in switched:
        action.required = True

    if not parsed_args.config_file:
        return config_args

    config = ConfigParser()
    if not config.read(parsed_args.config_file):
        sys.stderr.write('Config file "%s" doesn\'t exists\n'
                         % parsed_args.config_file)
        sys.exit(7)

    return _convert_config_to_stdin(config, parser)
def fncPluginVersion():
    """Return the plugin version string from the bundled metadata.txt."""
    metadata = ConfigParser()
    metadata.read(os.path.join(os.path.dirname(__file__), 'metadata.txt'))
    # Other [general] fields (name, description) are available here too.
    return metadata.get('general', 'version')
def __init__(self, filename=None):
    """Initialise the parser and autoload the configuration.

    :param filename: optional path of the conf file to remember for loading.
    """
    SafeConfigParser.__init__(self)
    self.filename = filename  # remembered so the config can be (re)loaded
    self.tags = dict()  # tag mapping; presumably filled by generate_tags() — confirm
    self.autoload()
def moodle_config_skeleton(course_name, moodle_id):
    """Build a minimal course ConfigParser holding name and moodle id."""
    from configparser import ConfigParser

    skeleton = ConfigParser()
    skeleton.add_section("course")
    course = skeleton['course']
    course['course_name'] = course_name
    course['moodle_id'] = str(moodle_id)
    return skeleton
def makeService(self, options):
    """Build the MultiService hosting the Telegram bot and its client."""
    if options['config'] is None:
        print('Config file not specified')
        exit(1)

    config = ConfigParser()
    config.optionxform = str  # keep option names case sensitive
    config.read([options['config']])

    token = config['telegrambot']['token']
    proxy = config['proxy'].get('address', None)
    if proxy:
        os.environ['http_proxy'] = 'http://%s' % proxy
        os.environ['https_proxy'] = 'https://%s' % proxy

    msg_plugins = list(config['message_plugins'].values())
    # Export every [env] option into the process environment.
    for key, value in config['env'].items():
        os.environ[key] = value

    multi = service.MultiService()
    bot = BotService(plugin_filespec=msg_plugins)
    bot.setServiceParent(multi)
    client = TwistedClient(token, bot.on_update, proxy=proxy)
    client.setServiceParent(multi)
    return multi
from configparser import ConfigParser from collections import namedtuple from sys import exit CatalogueSpecifier = namedtuple( 'CatalogueSpecifier', ['command', 'path', 'mediadir', 'description'] ) config = ConfigParser() config.read('./config.ini') BOTTOKEN = config.get('config', 'bottoken', fallback=None) if not BOTTOKEN: print('Please give "bottoken" in your config.ini') exit(1) WELCOME = config.get('config', 'welcome', fallback=None) if not WELCOME: print('Please give "welcome" in your config.ini') exit(1) catalogue_section = config['catalogues'] CATALOGUESPECIFIERS = [] for key, value in catalogue_section.items(): path_json, mediadir, description = value.split(':') CATALOGUESPECIFIERS.append( CatalogueSpecifier( command=key,
"""Get all the security issues""" import os from configparser import ConfigParser from cbw_api_toolbox.cbw_api import CBWApi CONF = ConfigParser() CONF.read( os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', 'api.conf')) CLIENT = CBWApi(CONF.get('cyberwatch', 'url'), CONF.get('cyberwatch', 'api_key'), CONF.get('cyberwatch', 'secret_key')) CLIENT.security_issues()
import time
import logging
import tenacity
from configparser import ConfigParser
from database import Database_test

# Log everything to create_bms.log, appending across runs.
logging.basicConfig(
    level=logging.DEBUG,
    format=
    '%(asctime)s %(levelname)s %(process)d %(name)s.%(lineno)d %(message)s',
    datefmt='[%Y-%m_%d %H:%M:%S]',
    filename='create_bms.log',
    filemode='a')
logger = logging.getLogger(__name__)

# Deployment settings all come from bms.ini in the working directory.
cp = ConfigParser()
cp.read("bms.ini")
REST_SERVER = cp.get("rest", "rest_service")
REST_SERVER_PORT = cp.get("rest", "rest_service_port")
USERNAME = cp.get('ipmi', 'username')
PASSWORD = cp.get('ipmi', 'password')
MODE = cp.get('ipmi', 'mode')
OS_VERSION = cp.get('image', 'os_version')
NETMASK = cp.get('image', 'netmask')
TIMEOUT = int(cp.get('image', 'deploy_image_timeout'))


def read_file(f_name):
    """Return the first line of *f_name*."""
    # ``with`` closes the handle; the original leaked the open file.
    with open(f_name, "r") as fp:
        return fp.readline()
https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ import os from configparser import ConfigParser, RawConfigParser from loguru import logger # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # 配置文件和数据文件目录 CONFIG_DIR = os.path.join(BASE_DIR, 'config') CONFIG = ConfigParser() CONFIG.read(os.path.join(CONFIG_DIR, 'config.ini'), encoding='utf-8') # 日志文件配置 LOG_DIR = os.path.join(BASE_DIR, 'log') if os.path.exists(LOG_DIR) is False: os.makedirs(LOG_DIR) logger.add(os.path.join(LOG_DIR, 'error.log'), rotation='1 days', retention='30 days', encoding='utf-8') # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
import os
import socket
import threading
import logging

log = logging.getLogger('testutils.py')

try:
    from configparser import ConfigParser
except ImportError:
    # Python 2 fallback.
    from ConfigParser import ConfigParser

from stomp import StatsListener, WaitingListener
from stomp.backward import *

# Shared test configuration, read once at import time.
config = ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), 'setup.ini'))


def get_environ(name):
    """Return environment variable *name*, or None when it is unset."""
    # os.environ.get replaces the original bare ``except:`` which would
    # also have swallowed unrelated errors.
    return os.environ.get(name)


def get_standard_host():
    """Host/port for the standard broker, overridable via STD_HOST/STD_PORT."""
    host = config.get('default', 'host')
    port = config.get('default', 'port')
    return [(get_environ('STD_HOST') or host,
             int(get_environ('STD_PORT') or port))]
def _from_text(cls, text):
    """Parse '='-delimited ini *text* and build an instance from it."""
    parser = ConfigParser(delimiters='=')
    # case sensitive: https://stackoverflow.com/q/1611799/812183
    parser.optionxform = str
    parser.read_string(text)
    return cls._from_config(parser)
stop_words_path = 'utils/stopwords.txt' stop_words = set() content = open(stop_words_path, 'rb').read().decode('utf-8') for line in content.splitlines(): stop_words.add(line) words = jieba.cut(text) freq = {} for word in words: if word not in stop_words: freq[word] = freq.get(word, 0.0) + 1.0 return sorted(freq, key=freq.__getitem__, reverse=True)[:topK] if __name__ == "__main__": cp = ConfigParser() cp.read("config/path.cfg") ad_path = cp.get("raw_data", "ad") ad_result_path = cp.get("keys", "ad") news_path = cp.get("raw_data", "news") news_result_path = cp.get("keys", "news") print("Extracting ads keys...") extract_ads(path=ad_path, result_path=ad_result_path) print("ads keys extracted finished.") print("Extracting news keys...") extract_news(path=news_path, result_path=news_result_path) print("news keys extracted finished.")
""" Created on Tue Apr 24 09:29:52 2018 @author: michaelek """ import os from configparser import ConfigParser from datetime import date ##################################### ### Parameters ## Generic base_dir = os.path.realpath(os.path.dirname(__file__)) ini1 = ConfigParser() ini1.read([os.path.join(base_dir, os.path.splitext(__file__)[0] + '.ini')]) py_file = 'main.py' base_dir = os.path.split(os.path.realpath(os.path.dirname(__file__)))[0] input_dir = 'input_data' hydro_server = str(ini1.get('Input', 'hydro_server')) hydro_database = str(ini1.get('Input', 'hydro_database')) ts_table = str(ini1.get('Input', 'ts_table')) sites_table = 'ExternalSite' hydrotel_server = str(ini1.get('Input', 'hydrotel_server')) hydrotel_database = str(ini1.get('Input', 'hydrotel_database'))
class customBadges(ts3plugin): __path__ = getScriptPath(__name__) name = "Custom Badges" try: apiVersion = getCurrentApiVersion() except: apiVersion = 21 requestAutoload = False version = "0.9.5.1" author = "Bluscream" description = "Automatically sets some badges for you :)" offersConfigure = True commandKeyword = "" infoTitle = "[b]Badges[/b]" menuItems = [ (ts3defines.PluginMenuType.PLUGIN_MENU_TYPE_GLOBAL, 0, "Change " + name, "") # , # (ts3defines.PluginMenuType.PLUGIN_MENU_TYPE_GLOBAL, 1, "Generate Badge UIDs", "") ] hotkeys = [] ini = path.join(__path__, "settings.ini") txt = path.join(__path__, "notice") ui = path.join(__path__, "badges.ui") icons = path.join(ts3lib.getConfigPath(), "cache", "badges") # icons_ext = path.join(icons, "external") badges_ext_remote = "https://raw.githubusercontent.com/R4P3-NET/CustomBadges/master/badges.json" badges_remote = "https://badges-content.teamspeak.com/list" cfg = ConfigParser() dlg = None cfg["general"] = { "cfgversion": "1", "debug": "False", "enabled": "True", "badges": "", "overwolf": "False" } badges = {} extbadges = {} notice = QTimer() notice_nwmc = QNetworkAccessManager() mode = HookMode.NONE def __init__(self): if getAddonStatus("tspatch", "TS Patch").value > AddonStatus.INSTALLED.value: self.mode = HookMode.TSPATCH elif getAddonStatus("TS3Hook", "TS3Hook").value > AddonStatus.INSTALLED.value: self.mode = HookMode.TS3HOOK loadCfg(self.ini, self.cfg) self.requestBadges() self.requestBadgesExt() self.notice.timeout.connect(self.checkNotice) self.notice.start(30 * 1000) if PluginHost.cfg.getboolean("general", "verbose"): ts3lib.printMessageToCurrentTab( "{0}[color=orange]{1}[/color] Plugin for pyTSon by [url=https://github.com/{2}]{2}[/url] loaded." 
.format(timestamp(), self.name, self.author)) def infoData(self, schid, id, atype): if atype != ts3defines.PluginItemType.PLUGIN_CLIENT: return None (err, ownID) = ts3lib.getClientID(schid) if ownID != id: return None # overwolf = self.cfg.getboolean('general', 'overwolf') # badges = self.cfg.get('general', 'badges').split(',') (err, badges) = ts3lib.getClientVariable( schid, id, ts3defines.ClientPropertiesRare.CLIENT_BADGES) (overwolf, badges) = parseBadges(badges) _return = [ "Overwolf: {0}".format("[color=green]Yes[/color]" if overwolf else "[color=red]No[/color]") ] # i = [] # for badge in badges: # if badge for badge in badges: lst = self.badges if badge in self.extbadges: lst = self.extbadges _return.append("{} {}".format( # "[img]data://image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACp0lEQVR42qXSW0jTURwH8O/5/93mLm460alFXrKLqQ8aNUzRB6NWhOYG9ZDYlYiSHgoCs4d6qIwIiuipIOmtqClYUZEIhlDgbTIvealR7T6bm3Nz2///P/2bEURlRQd+L79zzuf8+HII/nOR5TYnduw+zjGMuuSJue2fgcltdRfT9BvO0kgY9pEPl8pedLb+NTCztb5cWZo3oM0GGI8PDi+B2xXauOl55+AfAZuhQQZ58pCudl0RlSiBJCmY0QFYJ4LjvEDK9M86ossDVQZjanXxI0VBOiSHWxI97vp5eF9bMOXjTTXdXebfAk5Dg4pZqZtPXasAL0+DvPlcoh+5eRns2zFMeHmE4kJKZcf90C8Be239Xc2eqgPEOQOiy4f82JlEP3ThFBjLMGJqHd5Me9sNfd0HfwKc2435is3F76TZFLHeV2BzC6Fsu5PYCx4yguvtgUq3Av2TPszMLhQ00dD7HwBX9c6+lNq8Lfz4EDi7ExJ9DVRX25eA5n3gerohS88BNx/H4yl7b+OCv+Y74JRkmGT1+oeyXBacKwAhMAd21Rqk3HqQAAL7d0KwWEEVadDwBFbHHPpn/aYjkaA5AXhyi9zKE9WZ/MgYaJIMiAsQnB+B1JzEC9zoINisQtAonyg1R2C22YN75/0a4ma0JxWnG26A84EGwqACheD1gl29Hqpr90Aog8BRE4RhCyDXgC7yUHHAp0AEXS53C/FXVMakxlIJb50Wvx0B5UXAaUdSeSVUbbe/ZdAkZvBSnCgDNMwDUQ5agcVTlwfkc0UVTW4qA52yAQJA4xwYsQSHGGRdI4hKjciVViRn5YFGqHhZABFLGiPocjiWQvQWl1CqlYVonAf3dQLxXDy6iLjNJpo8hMwcUFaGOMeD5wRxSgHu8KJql99DvgBZsjDj7AAlKgAAAABJRU5ErkJggg==[/img]", # "[img]file:///C:/Users/blusc/AppData/Roaming/TS3Client/styles/dark/apply.svg[/img]", "[img]https://badges-content.teamspeak.com/{}/{}.svg[/img]". 
format(badge, lst[badge]["filename"] if badge in lst else "unknown"), self.badgeNameByUID(badge, lst) if badge in lst else badge)) return _return def saveBadges(self, external): db = ts3client.Config() query = QSqlQuery(db) (timestamp, internal, array) = loadBadges() delimiter = array.mid(0, 12) delimiter1 = 0 delimiter2 = 0 delimiter3 = 0 delimiter4 = 0 guid_len = 0 guid = "" name_len = 0 name = "" url_len = 0 url = "" desc_len = 0 desc = "" for i in range(0, array.size()): if i == 12: #guid_len guid_len = int(array.at(i)) guid = str(array.mid(i + 1, guid_len)) elif i == (12 + 1 + guid_len + 1): delimiter1 = array.mid(i - 1, i - 1) name_len = int(array.at(i)) name = str(array.mid(i + 1, name_len)) elif i == (12 + 1 + guid_len + 1 + name_len + 2): delimiter2 = array.mid(i - 1, i - 1) url_len = int(array.at(i)) url = str(array.mid(i + 1, url_len)) elif i == (12 + 1 + guid_len + 1 + name_len + 2 + url_len + 2): delimiter3 = array.mid(i - 3, i - 3) delimiter4 = array.mid(i + desc_len, i + desc_len) desc_len = int(array.at(i)) desc = str(array.mid(i + 1, desc_len)) break print("delimiter:", delimiter.toHex()) print("delimiter1:", delimiter1.toHex()) print("delimiter2:", delimiter2.toHex()) print("delimiter3:", delimiter3.toHex()) print("delimiter4:", delimiter4.toHex()) print("array:", array.toHex()) # query.prepare( "UPDATE Badges (BadgesListData) VALUES (:byteArray)" ); # query.bindValue( ":imageData", array); def badgeNameByUID(self, uid, lst=badges): for badge in lst: if badge == uid: return lst[badge]["name"] def requestBadges(self): self.nwmc_badges = QNetworkAccessManager() self.nwmc_badges.connect("finished(QNetworkReply*)", self.loadBadges) self.nwmc_badges.get(QNetworkRequest(QUrl(self.badges_remote))) def loadBadges(self, reply): try: data = reply.readAll().data() # .decode('utf-8') self.badges = parseBadgesBlob(QByteArray(data))[0] except: ts3lib.logMessage(format_exc(), ts3defines.LogLevel.LogLevel_ERROR, "pyTSon", 0) (tstamp, self.badges, array) = 
loadBadges() del self.nwmc_badges def requestBadgesExt(self): self.nwmc_ext = QNetworkAccessManager() self.nwmc_ext.connect("finished(QNetworkReply*)", self.loadBadgesExt) self.nwmc_ext.get(QNetworkRequest(QUrl(self.badges_ext_remote))) def loadBadgesExt(self, reply): try: data = reply.readAll().data().decode('utf-8') self.extbadges = loads(data) self.nwmc_exti = {} self.tmpfile = {} for badge in self.extbadges: _name = self.extbadges[badge]["filename"] _path = path.join(self.icons, _name) if path.exists(_path) and path.getsize(_path) > 0: continue self.requestExtIcon(_name) self.requestExtIcon("{}_details".format(_name)) except: ts3lib.logMessage(format_exc(), ts3defines.LogLevel.LogLevel_ERROR, "pyTSon", 0) del self.nwmc_exti def requestExtIcon(self, filename): self.nwmc_exti[filename] = QNetworkAccessManager() self.nwmc_exti[filename].connect("finished(QNetworkReply*)", self.loadExtIcon) self.tmpfile[filename] = QFile() self.tmpfile[filename].setFileName(path.join(self.icons, filename)) self.tmpfile[filename].open(QIODevice.WriteOnly) url = "https://raw.githubusercontent.com/R4P3-NET/CustomBadges/master/img/{}".format( filename) self.nwmc_exti[filename].get(QNetworkRequest(QUrl(url))) def loadExtIcon(self, reply): try: if reply.error() != QNetworkReply.NoError: ts3lib.logMessage( "Requesting \"{}\" failed:\n{}".format( reply.url().toString(), reply.errorString()), ts3defines.LogLevel.LogLevel_ERROR, "pyTSon", 0) return name = reply.url().fileName() self.tmpfile[name].write(reply.readAll()) if self.tmpfile[name].isOpen(): self.tmpfile[name].close() self.tmpfile[name].deleteLater() except: ts3lib.logMessage(format_exc(), ts3defines.LogLevel.LogLevel_ERROR, "pyTSon", 0) def checkNotice(self): self.notice_nwmc.connect("finished(QNetworkReply*)", self.loadNotice) self.notice_nwmc.get( QNetworkRequest( QUrl( "https://raw.githubusercontent.com/R4P3-NET/CustomBadges/master/notice" ))) def loadNotice(self, reply): data = reply.readAll().data().decode('utf-8') if not 
path.isfile(self.txt): with open(self.txt, 'w'): pass data_local = "" with open(self.txt, 'r') as myfile: data_local = myfile.read() myfile.close() data = data.strip() # print("data:", data) # print("data_local:", data_local) if data == "" or data == data_local.strip(): return # self.cfg.get('general', 'lastnotice') with open(self.txt, "w") as text_file: text_file.write(data) text_file.close() # self.cfg.set('general', 'lastnotice', data) title = "{} Notice!".format(self.name) msgBox(data, 0, title) ts3lib.printMessageToCurrentTab("{}\n\n{}".format(title, data)) def stop(self): saveCfg(self.ini, self.cfg) self.notice.stop() def configure(self, qParentWidget): self.openDialog() def onMenuItemEvent(self, schid, atype, menuItemID, selectedItemID): if atype != ts3defines.PluginMenuType.PLUGIN_MENU_TYPE_GLOBAL: return if menuItemID == 0: self.openDialog() elif menuItemID == 1: self.saveBadges(self.extbadges) for i in range(0, 3): # 0c4u2snt-ao1m-7b5a-d0gq-e3s3shceript uid = [ random_string(8, string.ascii_lowercase + string.digits) ] for _i in range(0, 3): uid.append( random_string(4, string.ascii_lowercase + string.digits)) uid.append( random_string(12, string.ascii_lowercase + string.digits)) ts3lib.printMessageToCurrentTab( "[color=red]Random UID #{}: [b]{}".format( i, '-'.join(uid))) def onConnectStatusChangeEvent(self, schid, newStatus, errorNumber): if newStatus == ts3defines.ConnectStatus.STATUS_CONNECTION_ESTABLISHED: self.setCustomBadges() def setCustomBadges(self): # try: if self.mode == HookMode.NONE: return overwolf = self.cfg.getboolean('general', 'overwolf') badges = self.cfg.get('general', 'badges').split(",") # if len(badges) > 0: badges += ['0c4u2snt-ao1m-7b5a-d0gq-e3s3shceript'] (err, schids) = ts3lib.getServerConnectionHandlerList() reg = compile('3(?:\.\d+)* \[Build: \d+\]') for schid in schids: _badges = badges err, ver = ts3lib.getServerVariable( schid, ts3defines.VirtualServerProperties.VIRTUALSERVER_VERSION) err, platform = 
ts3lib.getServerVariable( schid, ts3defines.VirtualServerProperties.VIRTUALSERVER_PLATFORM) if getServerType(schid, reg) in [ ServerInstanceType.TEASPEAK, ServerInstanceType.UNKNOWN ]: _badges = [x for x in badges if not x in self.extbadges][:3] _badges = buildBadges(_badges, overwolf) sendCommand(name=self.name, cmd=_badges, schid=schid, mode=self.mode) # except: ts3lib.logMessage(format_exc(), ts3defines.LogLevel.LogLevel_ERROR, "pyTSon", 0) def openDialog(self): if not self.dlg: self.dlg = BadgesDialog(self) self.dlg.show() self.dlg.raise_() self.dlg.activateWindow()
import util argparser = ArgumentParser() argparser.add_argument('--numEpoch',type=int, default=10) argparser.add_argument('--logEvery',type=int, default =2000) argparser.add_argument('--learningRate',type=float, default = 0.1) argparser.add_argument('--gradientClip',type=float, default = 0.25) argparser.add_argument('--cuda',action='store_true') argparser.add_argument('--samplePrediction',action='store_true') argparser.add_argument('--debug',action='store_true') argparser.add_argument('--reprocess',action='store_true',help='process whole data again') argparser.add_argument('--validEvery',type=int, default = 200000, help='perform validation and anneal learning rate every given iterations') args = argparser.parse_args() config = ConfigParser() config.read('config.ini') config.set('general','debug','True' if args.debug else 'False') config.set('general','reprocess','True' if args.reprocess else 'False') device = torch.device('cuda' if args.cuda else 'cpu') def train(dataSource,args): global model_ global hidden global lr #data load trainF,trainT = dataSource # target mean for R2 score targetMean=torch.FloatTensor(trainT).mean(dim=0)
def get_api_key():
    """Return the captcha-service API key read from ../config/api.cfg.

    Raises configparser.NoSectionError if the file is missing or has no
    [captcha_api] section.
    """
    parser = ConfigParser()
    parser.read('../config/api.cfg')
    key = parser.get('captcha_api', 'key')
    return key
def change_cookie(self):
    """Store the cookie from the line edit into the in-memory config."""
    cfg.set('picbed', "cookie", self.lineEdit_cookie.text())

def changeEvent(self, event):
    """On minimize, hide the window into the system tray."""
    if event.type() == QEvent.WindowStateChange and self.isMinimized():
        self.tray.showMessage("通知", "已最小化到托盘,点击开始截图")
        self.tray.show()
        self.hide()

def closeEvent(self, event):
    """Ask for confirmation; on exit, persist the config to config.ini."""
    reply = QMessageBox.information(self, "消息", "是否退出程序",
                                    QMessageBox.Yes | QMessageBox.No)
    if reply == QMessageBox.Yes:
        event.accept()
        print("修改配置文件")
        # Bug fix: was cfg.write(open("config.ini", "r+", ...)) which both
        # leaked the file handle and, because "r+" does not truncate, left
        # stale trailing bytes when the new config was shorter than the old
        # one, corrupting the INI file. Open in "w" via a context manager.
        with open("config.ini", "w", encoding="utf-8") as config_fp:
            cfg.write(config_fp)
    else:
        event.ignore()

if __name__ == "__main__":
    cfg = ConfigParser()
    cfg.read('config.ini')
    # NOTE(review): the return value is discarded — presumably a sanity check
    # that the [picbed] cookie option exists (raises if absent); confirm.
    cfg.get('picbed', 'cookie')
    app = QApplication(sys.argv)
    window = Main()
    window.show()
    height = QApplication.desktop().screenGeometry().height()
    width = QApplication.desktop().screenGeometry().width()
    sys.exit(app.exec_())
# Licensed under GPL version 3 - see LICENSE.rst from os import path from configparser import ConfigParser from cffi import FFI with open (path.join(path.dirname(__file__), "cdef.txt"), "r") as myfile: cdeftxt=myfile.read() ffi = FFI() ffi.cdef(cdeftxt) conf = ConfigParser() # When setup.py is run, then setup.cfg is in the current directory # However, when runngin this in pytest, it's in ../ # So offer both options here (files not found are silently ignored). conf.read(['setup.cfg', '../setup.cfg', '../../setup.cfg']) marxscr = conf.get('MARX', 'srcdir') marxlib = conf.get('MARX', 'libdir') sources = [('pfile', 'src', 'pfile.c'), ('marx', 'libsrc', 'mirror.c')] headers = [('pfile', 'src'), ('jdmath', 'src') , ('jdfits', 'src'), ('marx', 'src',), ('marx', 'libsrc',), ('src',), ('libsrc',)] ffi.set_source("_marx", ''' # include "pfile.h" # include "_pfile.h" # include <jdmath.h> # include "marx.h"
def __init__(self, config_file, extra_args):
    """Load *config_file*, apply command-line overrides, and back it up.

    Args:
        config_file: path to an INI file readable by ConfigParser.
        extra_args: flat list like ['--key', 'value', ...]; keys that match
            an option in the file override its value.
    """
    self.config_file = config_file
    print("read config from " + config_file)
    config = ConfigParser()
    config.read(config_file)
    if extra_args:
        # Pair up ['--k', 'v', ...] and strip the leading '--' from keys.
        overrides = {k[2:]: v for k, v in zip(extra_args[0::2], extra_args[1::2])}
        for section in config.sections():
            for k, v in config.items(section):
                if k in overrides:
                    # Values from ConfigParser are strings, so type(v) is str;
                    # the cast keeps the stored value a string.
                    config.set(section, k, type(v)(overrides[k]))
    self._conf = config
    # NOTE(review): self.model_dir is presumably a property defined elsewhere
    # on this class (derived from the config) — confirm it exists by now.
    if not os.path.isdir(self.model_dir):
        os.mkdir(self.model_dir)
    if not self.model_dir.endswith('/'):
        # Was an assert; raise explicitly so the check survives `python -O`.
        raise ValueError("model_dir must end with '/': " + self.model_dir)
    # Was config.write(open(...)) which leaked the file handle; use `with`.
    with open(self.model_dir + self.config_file + '.bak', 'w') as backup_fp:
        config.write(backup_fp)
    print('Loaded config file successfully.')
    for section in config.sections():
        for k, v in config.items(section):
            print(k, v)
import time
import sys
import os

# Project modules live outside the default path on the target Pi.
sys.path.append('/home/pi/droneponics')
import drone
from configparser import ConfigParser
import logging

# Per-host config file, e.g. .../config/Google/<hostname>.ini
parser = ConfigParser()
parser.read("/home/pi/droneponics/config/Google/" + drone.gethostname() + ".ini")

# tune console logging
_log = logging.getLogger('GoogleLog')
logFormatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
_log.addHandler(consoleHandler)
# Level comes from the [logging] section as a string (e.g. "INFO");
# when the option is absent the fallback is the int logging.DEBUG —
# Logger.setLevel accepts both names and ints, so either form works.
_log.setLevel(parser.get('logging', 'logLevel', fallback=logging.DEBUG))
# Emit one message per level so the effective threshold is visible at startup.
_log.critical("critical")
_log.error("error")
_log.warning("warning")
_log.info("info")
_log.debug("debug")
_log.info("ConfigParser path = /home/pi/droneponics/config/Google/" + drone.gethostname() + ".ini")
def load(self, filename):
    """Populate this NEAT configuration from the INI file *filename*.

    Reads the [phenotype], [genetic], [genotype compatibility] and [Types]
    sections and stores each value as an attribute on self, then fills in
    run-time defaults (statistics, reporting, checkpointing).

    Raises:
        Exception: if *filename* does not exist, a 'partial' connection
            fraction is out of range, an activation function is unknown,
            or a stagnation/reproduction type is not registered.
        RuntimeError: if the mandatory [Types] section is missing.
    """
    if not os.path.isfile(filename):
        raise Exception('No such config file: ' + os.path.abspath(filename))
    with open(filename) as f:
        parameters = ConfigParser()
        # read_file replaced readfp in Python 3; support both APIs.
        if hasattr(parameters, 'read_file'):
            parameters.read_file(f)
        else:
            parameters.readfp(f)

    if not parameters.has_section('Types'):
        raise RuntimeError(
            "'Types' section not found in NEAT configuration file.")

    # Phenotype configuration
    # input/output node counts are supplied later by the caller, not the file.
    self.input_nodes = None
    self.output_nodes = None
    self.hidden_nodes = int(parameters.get('phenotype', 'hidden_nodes'))
    self.initial_connection = parameters.get('phenotype', 'initial_connection')
    self.connection_fraction = None
    self.max_weight = float(parameters.get('phenotype', 'max_weight'))
    self.min_weight = float(parameters.get('phenotype', 'min_weight'))
    self.feedforward = bool(int(parameters.get('phenotype', 'feedforward')))
    self.weight_stdev = float(parameters.get('phenotype', 'weight_stdev'))
    self.activation_functions = parameters.get(
        'phenotype', 'activation_functions').strip().split()

    # Verify that initial connection type is valid.
    # 'partial <fraction>' carries its fraction in the same option value.
    if 'partial' in self.initial_connection:
        c, p = self.initial_connection.split()
        self.initial_connection = c
        self.connection_fraction = float(p)
        if not (0 <= self.connection_fraction <= 1):
            raise Exception(
                "'partial' connection value must be between 0.0 and 1.0, inclusive."
            )

    assert self.initial_connection in self.allowed_connectivity

    # Verify that specified activation functions are valid.
    for fn in self.activation_functions:
        if not activation_functions.is_valid(fn):
            raise Exception(
                "Invalid activation function name: {0!r}".format(fn))

    # Select a genotype class.
    if self.feedforward:
        self.genotype = FFGenome
    else:
        self.genotype = Genome

    # Genetic algorithm configuration
    self.pop_size = int(parameters.get('genetic', 'pop_size'))
    self.max_fitness_threshold = float(
        parameters.get('genetic', 'max_fitness_threshold'))
    self.prob_add_conn = float(parameters.get('genetic', 'prob_add_conn'))
    self.prob_add_node = float(parameters.get('genetic', 'prob_add_node'))
    self.prob_delete_conn = float(
        parameters.get('genetic', 'prob_delete_conn'))
    self.prob_delete_node = float(
        parameters.get('genetic', 'prob_delete_node'))
    self.prob_mutate_bias = float(
        parameters.get('genetic', 'prob_mutate_bias'))
    self.bias_mutation_power = float(
        parameters.get('genetic', 'bias_mutation_power'))
    self.prob_mutate_response = float(
        parameters.get('genetic', 'prob_mutate_response'))
    self.response_mutation_power = float(
        parameters.get('genetic', 'response_mutation_power'))
    self.prob_mutate_weight = float(
        parameters.get('genetic', 'prob_mutate_weight'))
    self.prob_replace_weight = float(
        parameters.get('genetic', 'prob_replace_weight'))
    self.weight_mutation_power = float(
        parameters.get('genetic', 'weight_mutation_power'))
    self.prob_mutate_activation = float(
        parameters.get('genetic', 'prob_mutate_activation'))
    self.prob_toggle_link = float(
        parameters.get('genetic', 'prob_toggle_link'))
    self.reset_on_extinction = bool(
        int(parameters.get('genetic', 'reset_on_extinction')))

    # genotype compatibility
    self.compatibility_threshold = float(
        parameters.get('genotype compatibility', 'compatibility_threshold'))
    self.excess_coefficient = float(
        parameters.get('genotype compatibility', 'excess_coefficient'))
    self.disjoint_coefficient = float(
        parameters.get('genotype compatibility', 'disjoint_coefficient'))
    self.weight_coefficient = float(
        parameters.get('genotype compatibility', 'weight_coefficient'))

    # Gene types
    self.node_gene_type = NodeGene
    self.conn_gene_type = ConnectionGene

    # [Types] names must map to classes previously registered in self.registry;
    # their section options are stashed raw in self.type_config for the class.
    stagnation_type_name = parameters.get('Types', 'stagnation_type')
    reproduction_type_name = parameters.get('Types', 'reproduction_type')
    if stagnation_type_name not in self.registry:
        raise Exception(
            'Unknown stagnation type: {!r}'.format(stagnation_type_name))
    self.stagnation_type = self.registry[stagnation_type_name]
    self.type_config[stagnation_type_name] = parameters.items(
        stagnation_type_name)
    if reproduction_type_name not in self.registry:
        raise Exception('Unknown reproduction type: {!r}'.format(
            reproduction_type_name))
    self.reproduction_type = self.registry[reproduction_type_name]
    self.type_config[reproduction_type_name] = parameters.items(
        reproduction_type_name)

    # Gather statistics for each generation.
    self.collect_statistics = True
    # Show stats after each generation.
    self.report = True
    # Save the best genome from each generation.
    self.save_best = False
    # Time in minutes between saving checkpoints, None for no timed checkpoints.
    self.checkpoint_time_interval = None
    # Time in generations between saving checkpoints, None for no generational checkpoints.
    self.checkpoint_gen_interval = None
new_lines = [] for added_file_path in added_files: fileID = os.path.basename(added_file_path) fileID = os.path.splitext(fileID)[0] fileType = get_file_type(added_file_path) fileCat = get_file_category(added_file_path) fileDate = str(datetime.datetime.now().isoformat()) + "Z" fileDate = fileDate.replace(' ', '-') new_manifest_line = "{0},{1},{2},8,{3},{3},,,False,0,False".format(fileID, fileType, added_file_path, fileDate) new_lines.append(new_manifest_line) return new_lines def write_new_manifest(new_manifest_lines): copy_original_manifest() new_manifest = open('VersionManifest.csv', 'a') for new_line in new_manifest_lines: new_manifest.write(new_line) new_manifest.write('\n') new_manifest.write(',\n') new_manifest.close() if __name__ == "__main__": config = ConfigParser() config.read('config.ini') battletech_home = config.get('directories', 'battletech_home') added_files = get_added_file_list() new_manifest_lines = get_new_manifest_lines(added_files) write_new_manifest(new_manifest_lines)
import spotify_client
import requests
from urllib.parse import urlencode
import json
from configparser import ConfigParser
from termcolor import colored

song_list = []

# Module-level config shared by the helpers below.
config = ConfigParser()
config.read('config.ini')


def get_get_token_header():
    """Build the Authorization header from the cached Spotify access token."""
    token = spotify_client.extract_access_token()[0]
    return {'Authorization': f'Bearer {token}'}


def get_user_playlist():
    """Prompt until a non-empty playlist ID is stored under [user] in config.ini."""
    while not config['user']['playlist_id']:
        entered = input(
            colored('We need the Playlist ID. Please enter the ID: ', 'yellow'))
        if not entered:
            continue
        config['user']['playlist_id'] = entered
        # save to a file
        with open('config.ini', 'w') as configfile:
            config.write(configfile)
        print(colored('Playlist has been updated.', 'green'))
#Assignment 1 from configparser import ConfigParser config = ConfigParser() config.read("exc.ini") print(config.sections()) print(config.options('EXT')) import os os.chdir(config.get("EXT","current_path")) for file in os.listdir("."): if file.endswith(config.get("EXT","from")): first_name = file.rsplit(".",1)[0] new_name = first_name + config.get("EXT","to") print(new_name) os.rename(file,new_name) """ Assignment2 """ import os rep=os.walk("C:\\Users\\DELL\\Google Drive") d1={} for r,d,f in rep: for file in f: d1.setdefault(file,[]).append(r) file_name=input('Enter the file name:') for k,v in d1.items(): if file_name.lower() in k.lower():
@time: 2019-08-02(星期五) 16:35 @Copyright © 2019. All rights reserved. ''' from pathlib import Path import re from configparser import ConfigParser from .logs import create_logger path = Path(__file__).parent for item in ['json', 'xml', 'xls']: p = path/'src'/item p.mkdir(parents=True, exist_ok=True) logger = create_logger('xuexi', 'DEBUG') cfg = ConfigParser() cfg.read(path/'config-default.ini', encoding='utf-8') cfg.read(path/'config-custom.ini', encoding='utf-8') class App(object): def __init__(self): self.rules = cfg.get('common', 'device') self.xmluri = Path(cfg.get(self.rules, 'xml_uri')) self.ad = adble.Adble( self.xmluri, cfg.getboolean(self.rules, 'is_virtual_machine'), cfg.get(self.rules, 'host'), cfg.getint(self.rules, 'port')) self.xm = xmler.Xmler(self.xmluri) def _art_run(self):
def __init__(self, path=None):
    """Case-preserving INI parser bound to *path* (decoded as utf-8-sig)."""
    ConfigParser.__init__(self, allow_no_value=True)
    self.path = path
    # Keep option names verbatim instead of the default lower-casing;
    # must be set before read() so keys are stored unmodified.
    self.optionxform = str
    self.read(path, encoding='utf-8-sig')
#try:
#    import mysql.connector.pooling
#except ImportError:
#    print("Cannot import pooling")
#    sys.exit
# *********************************************************************
try:
    # ConfigParser
    import configparser
    from configparser import ConfigParser
except ImportError:
    print("Cannot import ConfigParser, is it installed?\nExiting...")
    # Bug fix: was bare `sys.exit` (an attribute access that does nothing);
    # it must be called to actually terminate.
    sys.exit(1)
# *********************************************************************
try:
    # Read connection / serial / timing settings from the shared config file.
    config = ConfigParser()
    config.read('../config.ini')
    mysql_host = config.get('mysql', 'host')
    mysql_db = config.get('mysql', 'database')
    mysql_user = config.get('mysql', 'user')
    mysql_password = config.get('mysql', 'password')
    mysql_port = config.get('mysql', 'port')
    serialPort = config.get('serial', 'port')
    Debug = config.getboolean('debug', 'debug')
    interval = int(config.get('timeing', 'interval'))
    slaveAddresses = [
        int(x) for x in config.get('serial', 'addresses').split(',')
    ]
except (configparser.NoOptionError, configparser.NoSectionError) as err:
    # Robustness: a missing section previously crashed with a raw traceback;
    # treat it like a missing option and exit cleanly.
    print('Error ', err)
    sys.exit(1)
def write_config(self, hg19=None, hg38=None, mm10=None):
    """Create or update the sv2 config file with FASTA paths and resource dir.

    Resolves classifier pickle paths (aborting if any is missing), dumps the
    classifier JSON, then writes [FASTA_PATHS] and [RESOURCE_DIR] to self.fh,
    merging into the existing file when it is already present.
    """
    conf = ConfigParser()
    for x in self.clfs['default']:
        clf_fh = str(self.clfs['default'][x])
        if not os.path.isfile(clf_fh):
            realpath = get_path() + '/resources/training_sets/' + clf_fh
            if os.path.isfile(realpath):
                self.clfs['default'][x] = realpath
            else:
                msg = 'FATAL ERROR: {} pickle file not found. If this file is missing, reinstall sv2: pip uninstall sv2 -y && pip install sv2-VERSION.tar.gz'.format(realpath)
                # Was a Python 2 print statement; parenthesized form works in 2 and 3.
                print(msg)
                sys.stderr.write(msg + '\n')
                sys.exit(1)
    dump_json(self.json, self.clfs)
    if not os.path.isfile(self.fh):
        # Fresh config: create both sections before setting options.
        conf.add_section('FASTA_PATHS')
        # Bug fix: RESOURCE_DIR was never added, so the conf.set below
        # raised NoSectionError whenever the config file did not exist yet.
        conf.add_section('RESOURCE_DIR')
        # NOTE(review): defaults are None; Python 3's ConfigParser.set only
        # accepts strings — confirm callers always pass paths on first write.
        conf.set('FASTA_PATHS', 'hg19', hg19)
        conf.set('FASTA_PATHS', 'hg38', hg38)
        conf.set('FASTA_PATHS', 'mm10', mm10)
        conf.set('RESOURCE_DIR', 'sv2_resource', self.resource)
        with open(self.fh, 'w') as conf_fh:
            conf.write(conf_fh)
    else:
        # Merge: only overwrite entries the caller supplied (validated first).
        conf.read(self.fh)
        if hg19 is not None:
            errFH(hg19)
            conf.set('FASTA_PATHS', 'hg19', hg19)
        if hg38 is not None:
            errFH(hg38)
            conf.set('FASTA_PATHS', 'hg38', hg38)
        if mm10 is not None:
            errFH(mm10)
            conf.set('FASTA_PATHS', 'mm10', mm10)
        if self.resource is not None:
            conf.set('RESOURCE_DIR', 'sv2_resource', self.resource)
        with open(self.fh, 'w') as conf_fh:
            conf.write(conf_fh)
def set(self, section, option, value=None):
    """ConfigParser.set that lazily creates the target section first."""
    # Guard: delegate fails with NoSectionError unless the section exists.
    if not self.has_section(section):
        self.add_section(section)
    ConfigParser.set(self, section, option, value)
class ReadMeUpdater():
    """Updates streak counters, project tables and the daily log in a README.

    Driven by config.ini sections [StreakSection], [ProjectUpdateSection] and
    [DailyLogSection]. Relies on module-level constants (READ_ME_PATH,
    BACKUP_READ_ME_PATH, DATE_FORMAT_STR, *_SEARCH_VAL markers) defined
    elsewhere in this file. README tables are pipe-delimited markdown rows.
    """

    def __init__(self, config_file_path='config.ini'):
        # Read config and README, snapshot a backup, then index marker lines.
        try:
            if not os.path.exists(config_file_path):
                raise FileNotFoundError(config_file_path)
            if not os.path.exists(READ_ME_PATH):
                raise FileNotFoundError(READ_ME_PATH)
            self.config = ConfigParser()
            self.config.read(config_file_path)
            self.lines = None
            with open(READ_ME_PATH, 'r') as file_handler:
                self.lines = file_handler.readlines()
            # Backup name is keyed on day-of-month and minute.
            day_number = datetime.today().day
            min_number = datetime.today().minute
            copyfile(READ_ME_PATH, BACKUP_READ_ME_PATH.format(day_number, min_number))
            self.search_terms = []
            self.search_index = {}
            self.__build_search_terms()
            self.__build_search_index()
        except Exception as e:
            # NOTE(review): message reads "Something went while..." — likely
            # missing the word "wrong"; runtime string left untouched here.
            print('Something went while reading configurations', str(e))
            sys.exit()

    def __build_search_index(self):
        # Map each search term to the index of the last README line containing it.
        for i, elem in enumerate(self.lines):
            for term in self.search_terms:
                if term in elem:
                    self.search_index[term] = i

    def __build_search_terms(self):
        # Which marker lines we need depends on what the config asks us to update.
        update_streak = self.config['StreakSection'].getboolean('UpdateStreak')
        streak_broken = self.config['StreakSection'].getboolean('StreakBroken')
        if update_streak:
            self.search_terms.append(STREAK_SEARCH_VAL)
            self.search_terms.append(CURRENT_STREAK_SEARCH_DATE_VAL)
            if streak_broken:
                self.search_terms.append(LAST_STREAK_SEARCH_DATE_VAL)
        update_project = self.config['ProjectUpdateSection'].getboolean('UpdateProject')
        if update_project:
            project_section = self.config.get('ProjectUpdateSection', 'Section')
            self.search_terms.append(PROJECT_END_SEARCH_STR.format(project_section))

    def __increment_streak_line(self, streak_line, streak_date_line, streak_broken):
        # Bump the streak count (column 2 of the pipe-delimited row), or reset
        # it to 1 when the streak was broken. The count must equal the span of
        # the date range on the date row, which the assert cross-checks.
        lst = streak_line.split('|')
        if not streak_broken:
            seq_no = int(lst[2]) + 1
            streak_dates_str = streak_date_line.split('|')
            date_list = streak_dates_str[2].split('-')
            date_obj0 = datetime.strptime(date_list[0].strip(), DATE_FORMAT_STR)
            date_obj1 = datetime.strptime(date_list[1].strip(), DATE_FORMAT_STR)
            delta = (date_obj1 - date_obj0).days + 1
            assert delta == seq_no, F"Delta {delta}, seq:{seq_no}, date1: {date_obj0}, date2: {date_obj1}"
        else:
            seq_no = 1
        lst[2] = F' {seq_no} '
        return '|'.join(lst)

    def __increment_streak_date_line(self, line, streak_broken):
        # Advance the end date of the current streak range by one day, or
        # restart the range at today when the streak was broken.
        lst = line.split('|')
        date_list = lst[2].split('-')
        todays_date = datetime.now().strftime(DATE_FORMAT_STR)
        if not streak_broken:
            date_obj = datetime.strptime(date_list[1].strip(), DATE_FORMAT_STR)
            date_obj += timedelta(days=1)
            new_date = date_obj.strftime(DATE_FORMAT_STR)
            # Sanity check: the incremented end date must be today.
            assert todays_date == new_date, F'Todays date: {todays_date}, new date: {new_date}'
            date_list[1] = F' {new_date} '
        else:
            date_list[0] = F' {todays_date} '
            date_list[1] = F' {todays_date} '
        lst[2] = '-'.join(date_list)
        return '|'.join(lst)

    def __reset_last_streak_date_line(self, last_streak, cur_streak):
        # Archive the just-ended streak: keep its start date, end it two days ago.
        lst = last_streak.split('|')
        cur = cur_streak.split('|')
        cur_date_list = cur[2].split('-')
        lst_date_list = lst[2].split('-')
        lst_date_list[0] = cur_date_list[0]
        lst_date_list[1] = F' {(datetime.now() - timedelta(2)).strftime(DATE_FORMAT_STR)} '
        lst[2] = '-'.join(lst_date_list)
        return '|'.join(lst)

    def _get_next_proj_seq_no(self, section):
        # Next sequence number = number on the row above the section-end marker + 1.
        index = self.search_index[PROJECT_END_SEARCH_STR.format(section)]
        prev_proj_line = self.lines[index-1]
        seq_no_elements = prev_proj_line.split('|')[1].split('.')
        seq_no = int(seq_no_elements[0]) + 1
        return seq_no

    def __build_project_string(self):
        # Build the markdown table row for the new project; returns
        # (insertion index, row text), or None when updates are disabled.
        update_project = self.config['ProjectUpdateSection'].getboolean('UpdateProject')
        if not update_project:
            return
        proj_section = self.config.get('ProjectUpdateSection', 'Section')
        index = self.search_index[PROJECT_END_SEARCH_STR.format(proj_section)]
        proj_name = self.config.get('ProjectUpdateSection', 'Project')
        proj_desc = self.config.get('ProjectUpdateSection', 'Description')
        proj_notebook = self.config.get('ProjectUpdateSection', 'Notebook')
        proj_notes = self.config.get('ProjectUpdateSection', 'Notes')
        seq_no = self._get_next_proj_seq_no(proj_section)
        line = F"|{seq_no}.| {proj_name} | {proj_desc} | {proj_notebook} | {proj_notes} |\n"
        return (index, line)

    def update_streak_stats(self):
        # Rewrite the streak count/date rows in memory (call save_readme to persist).
        update_streak = self.config['StreakSection'].getboolean('UpdateStreak')
        streak_broken = self.config['StreakSection'].getboolean('StreakBroken')
        if not update_streak:
            return
        cur_streak_date_line = self.lines[self.search_index[CURRENT_STREAK_SEARCH_DATE_VAL]]
        cur_streak_line = self.lines[self.search_index[STREAK_SEARCH_VAL]]
        new_last_streak_date_line = ""
        if streak_broken:
            last_streak_date_line = self.lines[self.search_index[LAST_STREAK_SEARCH_DATE_VAL]]
            new_last_streak_date_line = self.__reset_last_streak_date_line(last_streak_date_line, cur_streak_date_line)
            self.lines[self.search_index[LAST_STREAK_SEARCH_DATE_VAL]] = new_last_streak_date_line
        new_cur_streak_date_line = self.__increment_streak_date_line(cur_streak_date_line, streak_broken)
        new_cur_streak_line = self.__increment_streak_line(cur_streak_line, new_cur_streak_date_line, streak_broken)
        self.lines[self.search_index[CURRENT_STREAK_SEARCH_DATE_VAL]] = new_cur_streak_date_line
        self.lines[self.search_index[STREAK_SEARCH_VAL]] = new_cur_streak_line

    def add_new_project(self):
        # Insert the new project row just before the section-end marker.
        update_project = self.config['ProjectUpdateSection'].getboolean('UpdateProject')
        if not update_project:
            return
        index, new_proj_line = self.__build_project_string()
        self.lines.insert(index, new_proj_line)

    def print_search_index(self):
        # Debug helper: dump marker -> line-number mapping.
        for k, v in self.search_index.items():
            print(k, ":", v)

    def save_readme(self, path=READ_ME_PATH):
        # Persist the in-memory README lines.
        with open(path, 'w') as file_handler:
            file_handler.writelines(self.lines)

    def update_daily_log(self, path=DAILY_LOG_PATH):
        # Append today's entry to the daily log table.
        update_daily_log = self.config['DailyLogSection'].getboolean('UpdateDailyLog')
        if not update_daily_log:
            return
        notes = self.config.get('DailyLogSection', 'Notes')
        date = datetime.now().strftime('%d %B %Y')
        new_line = F'| {date} | {notes} |\n'
        with open(path, 'a') as file_handler:
            file_handler.write(new_line)
def _get_transifex_url(self):
    """Compute a Transifex translate URL for each translation record in self.

    Scans every addon path (and its parent) for a .tx/config file to map
    module name -> Transifex project, then writes transifex_url on each
    record, or False when the URL cannot be built (no base URL configured,
    unknown module/language, or source/custom terms).
    """
    # e.g. 'https://www.transifex.com/izi/'
    base_url = self.env['ir.config_parameter'].sudo().get_param(
        'transifex.project_url')
    tx_config_file = ConfigParser()
    tx_sections = []
    for addon_path in ad_paths:
        tx_path = opj(addon_path, '.tx', 'config')
        if os.path.isfile(tx_path):
            tx_config_file.read(tx_path)
            # first section is [main], after [izi-11.sale]
            tx_sections.extend(tx_config_file.sections()[1:])
        # parent directory ad .tx/config is root directory in izi/izi
        tx_path = opj(addon_path, os.pardir, '.tx', 'config')
        if os.path.isfile(tx_path):
            tx_config_file.read(tx_path)
            tx_sections.extend(tx_config_file.sections()[1:])
    if not base_url or not tx_sections:
        # Nothing to link against: blank the URL on every record.
        self.update({'transifex_url': False})
    else:
        base_url = base_url.rstrip('/')
        # will probably be the same for all terms, avoid multiple searches
        translation_languages = list(set(self.mapped('lang')))
        languages = self.env['res.lang'].with_context(
            active_test=False).search([('code', 'in', translation_languages)])
        language_codes = dict((l.code, l.iso_code) for l in languages)
        # .tx/config files contains the project reference
        # using ini files like '[izi-master.website_sale]'
        translation_modules = set(self.mapped('module'))
        project_modules = {}
        for module in translation_modules:
            for section in tx_sections:
                tx_project, tx_mod = section.split('.')
                if tx_mod == module:
                    project_modules[module] = tx_project
        for translation in self:
            if not translation.module or not translation.source or translation.lang == 'en_US':
                # custom or source term
                translation.transifex_url = False
                continue
            lang_code = language_codes.get(translation.lang)
            if not lang_code:
                translation.transifex_url = False
                continue
            project = project_modules.get(translation.module)
            if not project:
                translation.transifex_url = False
                continue
            # e.g. https://www.transifex.com/izi/izi-10/translate/#fr/sale/42?q=text'Sale+Order'
            # NOTE(review): .replace("'", "\'") is a no-op — "\'" is just "'".
            # The intent was presumably to escape quotes (e.g. "\\'"); confirm
            # against the Transifex search syntax before changing.
            translation.transifex_url = "%(url)s/%(project)s/translate/#%(lang)s/%(module)s/42?q=%(src)s" % {
                'url': base_url,
                'project': project,
                'lang': lang_code,
                'module': translation.module,
                'src': werkzeug.url_quote_plus(
                    "text:'" + translation.source[:50].replace("'", "\'") + "'"),
            }
from configparser import ConfigParser
import datetime
import os
import sys
import logging

# The desired level comes from config.ini when present, otherwise from the
# logLevel environment variable (which may be unset, i.e. None).
if os.path.isfile("config.ini"):
    config = ConfigParser()
    config.read("config.ini")
    level = config["misc"]["logLevel"]
else:
    level = os.environ.get("logLevel")

LOGGER = logging.getLogger("SO2MIBOT")
LOGFORMAT = logging.Formatter("[%(asctime)s] %(levelname)-8s >>> %(message)s")

# Map the configured name onto a logging constant; any unrecognised value
# (including a missing setting) falls back to WARNING.
logLevel = {
    "debug": logging.DEBUG,
    "info": logging.INFO,
    "error": logging.ERROR,
    "critical": logging.CRITICAL,
}.get(level, logging.WARNING)

LOG_SH = logging.StreamHandler()
LOG_FH = logging.FileHandler("info.log")
def appium_desired(self):
    """Build Appium desired capabilities from ../config/config.ini and
    return a connected Remote webdriver with a 5s implicit wait."""
    config = ConfigParser()
    # Config lives next to the project root: <parent-of-cwd>/config/config.ini
    file_path = os.path.dirname(
        os.path.abspath('.')) + '/config/config.ini'
    config.read(file_path, encoding='utf-8')
    desired_caps = {}
    desired_caps['platformName'] = config.get("phoneType", "platformName")
    logger.info("You had select %s ." % desired_caps['platformName'])
    # Device OS version
    desired_caps['platformVersion'] = config.get("platformType", "platformVersion")
    logger.info("You had select %s ." % desired_caps['platformVersion'])
    # Device name
    desired_caps['deviceName'] = config.get("deviceName", "deviceName")
    logger.info("You had select %s ." % desired_caps['deviceName'])
    desired_caps['sessionOverride'] = config.get("sessionOverride", "sessionOverride")
    # The .apk/.app lives under <parent-of-cwd>/app/
    dir = os.path.dirname(os.path.abspath('.'))
    desired_caps['app'] = dir + '/app/' + config.get("app", "app")
    # desired_caps['app'] = file_path + config.get("app","app")
    logger.info("You had select %s ." % desired_caps['app'])
    # NOTE(review): 'noRest' looks like a typo for the Appium capability
    # 'noReset' — confirm against the config file and Appium docs.
    desired_caps['noRest'] = config.get('noRest', "noRest")
    # Package / activity of the application under test
    desired_caps['appPackage'] = config.get("appPackage", "appPackage")
    desired_caps['appActivity'] = config.get("appActivity", "appActivity")
    logger.info("start run app......")
    # Appium server endpoint
    desired_caps['ip'] = config.get('ip', "ip")
    desired_caps['port'] = config.get('port', "port")
    driver = webdriver.Remote(
        'http://' + str(desired_caps['ip']) + ':' + str(desired_caps['port']) + '/wd/hub',
        desired_caps)
    # self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
    driver.implicitly_wait(5)
    logger.info("Set implicitly wait 5 seconds.")
    return driver
import logging.config
from configparser import ConfigParser
from . import settings

# Overlay user preferences from the config file onto the default settings.
config = ConfigParser()
config.read(settings.CONFIG_DIR)

log_section = config['logging']
if log_section.getboolean('log') is True:
    handlers = settings.LOGGING_CONFIG['handlers']
    handlers['console']['level'] = log_section['console'].upper()
    handlers['file']['level'] = log_section['file'].upper()
    logging.config.dictConfig(settings.LOGGING_CONFIG)
else:
    # Logging disabled: attach a NullHandler so records are silently dropped.
    logging.getLogger('VariantValidator').addHandler(logging.NullHandler())

# <LICENSE>
# Copyright (C) 2019 VariantValidator Contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
from pkg_resources import parse_version
from configparser import ConfigParser
import setuptools, re, sys

assert parse_version(setuptools.__version__) >= parse_version('36.2')

# note: all settings are in settings.ini; edit there, not here
config = ConfigParser(delimiters=['='])
config.read('settings.ini')
cfg = config['DEFAULT']

# Settings copied into the setup() call vs. settings that merely must exist.
cfg_keys = 'version description keywords author author_email'.split()
expected = cfg_keys + "lib_name user branch license status min_python audience language".split()
for setting in expected:
    assert setting in cfg, f"missing expected setting: {setting}"
setup_cfg = {key: cfg[key] for key in cfg_keys}

# `python setup.py version` prints the version and stops.
if len(sys.argv) > 1 and sys.argv[1] == 'version':
    print(setup_cfg['version'])
    exit()

# license key -> (human-readable name, PyPI classifier suffix)
licenses = {
    'apache2': ('Apache Software License 2.0', 'OSI Approved :: Apache Software License'),
    'mit': ('MIT License', 'OSI Approved :: MIT License'),
    'gpl2': ('GNU General Public License v2', 'OSI Approved :: GNU General Public License v2 (GPLv2)'),
    'gpl3': ('GNU General Public License v3', 'OSI Approved :: GNU General Public License v3 (GPLv3)'),
    'bsd3': ('BSD License', 'OSI Approved :: BSD License'),
}
statuses = [
    '1 - Planning', '2 - Pre-Alpha', '3 - Alpha', '4 - Beta',
    '5 - Production/Stable', '6 - Mature', '7 - Inactive'
]
py_versions = '2.0 2.1 2.2 2.3 2.4 2.5 2.6 2.7 3.0 3.1 3.2 3.3 3.4 3.5 3.6 3.7 3.8'.split()

# Unknown license keys fall back to (raw name, no classifier).
lic = licenses.get(cfg['license'].lower(), (cfg['license'], None))