def WriteDemo(self):
    """Create a dummy ini file named configfile.ini demonstrating flat keys,
    explicit sections, dict-valued sections and list values."""
    config = ConfigObj()
    config.filename = "configfile.ini"
    #
    config['keyword1'] = "value1"
    config['keyword2'] = "value2"
    #
    config['section1'] = {}
    config['section1']['keyword3'] = "value3"
    config['section1']['keyword4'] = "value4"
    #
    section2 = {
        'keyword5': "value5",
        'keyword6': "value6",
        'sub-section': {
            'keyword7': "value7"
        }
    }
    # BUG FIX: the original assigned the literal string "section2" here,
    # leaving the section2 dict built above completely unused and writing
    # a scalar instead of the intended nested section.
    config['section2'] = section2
    #
    config['section3'] = {}
    config['section3']['keyword 8'] = ["value8", "value9", "value10"]
    config['section3']['keyword 9'] = [11, 12, 13]
    #
    config.write()
def send_email(subject, to_addr, body_text): base_path = os.path.dirname(os.path.abspath(__file__)) config_path = os.path.join(base_path, "config.ini") if os.path.exists(config_path): cfg = ConfigObj(config_path) cfg_dict = cfg.dict() else: print "Config not found! Exiting!" sys.exit(1) host = cfg_dict["server"] from_addr = cfg_dict["from_addr"] password = cfg_dict["password"] BODY = string.join(( "From: %s" % from_addr, "To: %s" % to_addr, "Subject: %s" % subject , "", body_text ), "\r\n") server = smtplib.SMTP(host) server.login(from_addr, password) server.sendmail(from_addr, [to_addr], BODY) server.quit()
def import_policies(file_contents):
    """
    This function imports policies from a file.

    The file has a config_object format, i.e. the text file has a header
        [<policy_name>]
    followed by key = value pairs.

    :param file_contents: The contents of the file
    :type file_contents: basestring
    :return: number of imported policies
    :rtype: int
    """
    # SECURITY NOTE(review): action/realm/user/resolver/client values are run
    # through eval(), so a crafted policy file can execute arbitrary code.
    # ast.literal_eval would be safer — confirm the values are always plain
    # Python literals before switching.
    policies = ConfigObj(file_contents.split('\n'), encoding="UTF-8")
    res = 0
    for policy_name in policies.keys():
        ret = set_policy(name=policy_name,
                         action=eval(policies[policy_name].get("action")),
                         scope=policies[policy_name].get("scope"),
                         realm=eval(policies[policy_name].get("realm", "[]")),
                         user=eval(policies[policy_name].get("user", "[]")),
                         resolver=eval(policies[policy_name].get("resolver", "[]")),
                         client=eval(policies[policy_name].get("client", "[]")),
                         time=policies[policy_name].get("time", "")
                         )
        # set_policy returns a value > 0 on success; count only those.
        if ret > 0:
            log.debug("import policy %s: %s" % (policy_name, ret))
            res += 1
    return res
def cli_cmd_dump_config():
    """Dump the selected collection/s to the file given after '-d'.

    Command-line flags read from sys.argv:
      -d FILE        output file (required)
      -col NAME      dump a single collection
      -all           dump every known collection
      --force-ids    pass force_ids=True to dump_collection
      --output       also print to screen
    """
    config = ConfigObj(indent_type=IDENT_TYPE)
    config.initial_comment = [
        str(datetime.datetime.now()),
        "This file is automatically generated by Invenio, running:",
        " ".join(sys.argv),
        ""
    ]
    force_ids = "--force-ids" in sys.argv
    print_to_screen = '--output' in sys.argv
    # FIX: the original used bare `except:`, which also swallows
    # SystemExit/KeyboardInterrupt. Only a missing/misplaced '-d' argument
    # (ValueError from index(), IndexError from the +1 lookup) should fall
    # back to printing usage.
    try:
        config.filename = sys.argv[sys.argv.index('-d') + 1]
    except (ValueError, IndexError):
        print_usage()
    if '-col' in sys.argv:
        try:
            collection = COLLECTIONS[sys.argv[sys.argv.index('-col') + 1].upper()]
            dump_collection(collection, config, force_ids, print_to_screen)
        except (ValueError, IndexError, KeyError):
            # Missing '-col' value or unknown collection name.
            print("ERROR: you must especify the collection to dump with the -col COLLECTION_NAME option")
    elif '-all' in sys.argv:
        for collection in COLLECTIONS:
            dump_collection(COLLECTIONS[collection], config, force_ids, print_to_screen)
    else:
        print("Please specify the collection to dump")
def get_config(config_file):
    """
    Parse *config_file*, validate it against CONFIG_SPEC and convert the
    option types.  Return the resulting dict-like ConfigObj.

    Raises ValueError on the first validation failure or missing section.
    """
    spec = ConfigObj(StringIO(CONFIG_SPEC), list_values=False)
    config = ConfigObj(config_file, configspec=spec)

    # Custom check for lowercase string lists, plus the standard ones.
    checker = Validator({'lowercase_string_list': lowercase_string_list})
    outcome = config.validate(checker, preserve_errors=True, copy=True)
    logger.debug("Config file version %d", config["version"])

    # Surface every validation problem as an exception.
    for section_list, key, message in flatten_errors(config, outcome):
        if key is None:
            raise ValueError(
                "The following section was missing: %s." % (
                    ", ".join(section_list)))
        raise ValueError(
            "The '%s' key in the section '%s' failed validation: %s" % (
                key, ", ".join(section_list), message))

    # For now, no automatic update support.
    if config["version"] != CONFIG_VERSION:
        logger.warning(
            "Config file version is %d, while expected version is %d. Please "
            "check for inconsistencies and update manually.",
            config["version"], CONFIG_VERSION)
    return config
def save(self):
    """Write every operating-system group (and per-server descriptions)
    to the configuration file at self.path."""
    config = ConfigObj()
    for osgroup in self.operating_systems:
        section = {
            'commands': osgroup.update_commands,
        }
        if osgroup.description is not None and osgroup.description != '':
            section['description'] = osgroup.description
        if osgroup.servers:
            section['servers'] = osgroup.servers
        # Only record values that differ from the defaults.
        if osgroup.connect_command != DEFAULT_CONNECT_COMMAND:
            section['connect'] = osgroup.connect_command
        if osgroup.command_separator != DEFAULT_COMMAND_SEPARATOR:
            section['command_separator'] = osgroup.command_separator
        config[osgroup.name] = section
        for server in osgroup.servers:
            if server.description:
                config[server.name] = {
                    'description': server.description
                }
    self.log.debug('Saving configuration to %s' % self.path)
    # BUG FIX: the file handle was opened inline and never closed; `with`
    # guarantees it is flushed and closed even if write() raises.
    with open(self.path, 'w') as outfile:
        config.write(outfile=outfile)
def write(self):
    """
    Make a copy of the stored config and write it to the configured file.
    """
    fresh = ConfigObj(encoding="UTF-8")
    fresh.filename = self._config_file

    # First copy over everything from the old config, even if it is not
    # correctly defined, to keep from losing data.
    for section, entries in self._config.items():
        if section not in fresh:
            fresh[section] = {}
        for name, value in entries.items():
            fresh[section][name] = value

    # Next make sure that everything we expect to have defined is present.
    for key in _CONFIG_DEFINITIONS.keys():
        key, definition_type, section, ini_key, default = self._define(key)
        self.check_setting(key)
        if section not in fresh:
            fresh[section] = {}
        fresh[section][ini_key] = self._config[section][ini_key]

    headphones.logger.info("Writing configuration to file")
    try:
        fresh.write()
    except IOError as e:
        headphones.logger.error("Error writing configuration file: %s", e)
class AFPShareConfig(dict):
    """
    Reader for AFP share configuration files.

    Maps each share section name to an AFPShareDisk; the global [Options]
    section (if any) is exposed separately via self.options.
    """
    def __init__(self, config_path=DEFAULT_CONFIG_PATH):
        dict.__init__(self)
        self.config = None
        self.path = config_path
        # Fail early with a clear error when the file is absent or unreadable.
        if not os.path.isfile(self.path):
            raise AFPShareError('No such file: %s' % self.path)
        if not os.access(self.path, os.R_OK):
            raise AFPShareError(
                'No permission to read configuration: %s' % self.path
            )
        self.config = ConfigObj(self.path)
        # [Options] holds global settings, not a share definition.
        if self.config.has_key('Options'):
            self.options = dict(self.config['Options'])
        else:
            self.options = {}
        # Every remaining section describes one exported disk/share.
        for k in filter(lambda k: k!='Options', self.config.keys()):
            try:
                self[k] = AFPShareDisk(k, self.config[k])
            except AFPShareError, e:
                # Best effort: report the broken share, keep loading the rest.
                print e
                continue
def getGameWeapons(gamename):
    """Build and return a WeaponManager for *gamename*, or None when no
    matching ini file exists (unrecognized game)."""
    ini_path = INIPATH.joinpath(gamename + '.ini')
    if not ini_path.isfile():
        # No ini to parse means we don't recognize this game.
        return None

    # Parse the ini; any parse errors are allowed to propagate.
    parsed = ConfigObj(ini_path)

    # Create the weapon manager for this game from the [settings] section.
    game_settings = parsed['settings']
    manager = WeaponManager(game_settings['prefix'],
                            game_settings['ammoprop'],
                            parsed.get('special names', None))

    # Register a Weapon instance for every entry in the [weapons] section.
    for name, data in parsed['weapons'].items():
        raw_ammo = data.get('maxammo', '0')
        if raw_ammo.isdigit():
            max_ammo = int(raw_ammo)
        else:
            # Non-numeric maxammo names a server cvar like ammo_<name>_max.
            max_ammo = es.ServerVar('ammo_' + raw_ammo + '_max')
        manager._registerWeapon(name,
                                data.get('ammoprop', None),
                                data.get('tags', '').split(','),
                                int(data.get('slot', 0)),
                                int(data.get('clip', 0)),
                                max_ammo)

    return manager
def get_config_dict_full(self, config_path):
    '''
    Get the whole configuration dict: read the base common-config (if the
    child names one) and overlay the child values on top of it.

    @param config_path The path for the configuration file
    @return dict The whole (merged) configuration dict
    '''
    base_config_dict = {}
    # file_error=True makes ConfigObj raise for a missing file instead of
    # silently returning an empty config.
    child_config_dict = ConfigObj(config_path, file_error=True)
    # A child config may name a base config to inherit from.
    if child_config_dict['configuration'].has_key('base'):
        config_path = child_config_dict['configuration']['base']
        if config_path.find('$') != -1:
            # The base path contains a variable reference: expand it first.
            config_path = self.parse_item(None, None, None, config_path)
        # Recursively load the (possibly chained) base configuration.
        base_config_dict = self.get_config_dict_full(config_path)
        child_configuration_dict = child_config_dict['configuration']
        base_configuration_dict = base_config_dict['configuration']
        # Child per-file sections override the base ones key by key.
        for file_name, file_dict in base_configuration_dict.iteritems():
            if file_name in child_configuration_dict:
                file_dict.update(child_configuration_dict[file_name])
        base_config_dict['configuration'].update(base_configuration_dict)
        child_config_dict.update(base_config_dict)
    return child_config_dict
def getFacts(self):
    """Collect device facts (CPU, memory, identity, uptime, interfaces)
    and overlay any matching per-device tags from device_tags.ini."""
    facts = {}
    # Gather live values in the same order as before.
    facts["cpu_utilization"] = self.getCPU()
    facts["free_system_memory"] = self.getfreeMemory()
    # NOTE: key typo ("sytem") kept intact — consumers rely on it.
    facts["total_sytem_memory"] = self.gettotalMemory()
    facts["hostname"] = self.getHostname()
    facts["system_uptime"] = self.getUptime()
    facts["platform"] = self.getPlatform()
    facts["serial_number"] = self.getserialNumber()
    facts["last_reboot_reason"] = self.getReasonforReboot()
    facts["connect_ip"] = self.address
    facts["interfaces"] = self.getInterfaces()
    facts["vendor"] = "cisco"
    facts["var_name"] = self.obj

    # Merge in tags for this device's address, when present.
    tags = ConfigObj("device_tags.ini").dict()
    for address in tags.keys():
        if address == self.address:
            facts.update(tags[address])
    return facts
def FileConfig(filename):
    """Load *filename* as a ConfigObj validated against the bundled ibid
    configspec and return it.

    :param filename: path to the configuration file to load
    :returns: validated ConfigObj instance
    """
    # FIX: the spec was opened with the py2-only file() builtin and the
    # handle was never closed; `with open(...)` closes it deterministically.
    with open(locate_resource('ibid', 'configspec.ini'), 'r') as spec:
        configspec = ConfigObj(spec, list_values=False, encoding='utf-8')
    config = ConfigObj(filename, configspec=configspec,
                       interpolation='Template', encoding='utf-8')
    config.validate(Validator())
    logging.getLogger('core.config').info(u"Loaded configuration from %s", filename)
    return config
class Configuration(UserDict):
    """Dict-like wrapper around a spec-validated ConfigObj.

    Loads (or creates) the config file, validates it with defaults filled
    in, and keeps the UserDict mirror in sync with the ConfigObj on writes.
    """
    def __init__(self, file=None, spec=None, *args, **kwargs):
        # file/spec are kept for later reference; extra args accepted for
        # signature compatibility but unused here.
        self.file = file
        self.spec = spec
        self.validator = Validator()
        self.setup_config(file=file, spec=spec)
        # copy=True fills in spec defaults; persist them on success.
        self.validated = self.config.validate(self.validator, copy=True)
        if self.validated:
            self.write()
        UserDict.__init__(self, self.config)

    def setup_config(self, file, spec):
        # The default way -- load from a file
        spec = ConfigObj(spec, list_values=False)
        try:
            self.config = ConfigObj(infile=file, configspec=spec, create_empty=True, stringify=True)
        except ParseError:
            # Unparseable file: delete it, recreate an empty config in its
            # place, then signal the reset to the caller.
            os.remove(file)
            self.config = ConfigObj(infile=file, configspec=spec, create_empty=True, stringify=True)
            raise ConfigurationResetException

    def __getitem__(self, *args, **kwargs):
        # Read from the underlying ConfigObj, not UserDict's mirror.
        return dict(self.config).__getitem__(*args, **kwargs)

    def __setitem__(self, *args, **kwargs):
        # Keep the ConfigObj and the UserDict mirror in sync.
        self.config.__setitem__(*args, **kwargs)
        UserDict.__setitem__(self, *args, **kwargs)

    def write(self):
        # Guard: a plain-dict-backed config has no write().
        if hasattr(self.config, "write"):
            self.config.write()
def test_handle_stringify_off():
    """With stringify disabled, assigning a non-string value must raise."""
    cfg = ConfigObj()
    cfg.stringify = False
    with pytest.raises(TypeError) as err:
        cfg['test'] = 1
    assert str(err.value) == 'Value is not a string "1".'
def test_creating_with_a_dictionary():
    """A ConfigObj built from a nested dict equals that dict without
    aliasing it."""
    source = {
        'key1': 'val1',
        'key2': 'val2',
        'section 1': {
            'key1': 'val1',
            'key2': 'val2',
            'section 1b': {
                'key1': 'val1',
                'key2': 'val2',
            },
        },
        'section 2': {
            'key1': 'val1',
            'key2': 'val2',
            'section 2b': {
                'key1': 'val1',
                'key2': 'val2',
            },
        },
        'key3': 'val3',
    }
    cfg = ConfigObj(source)
    # Equal content, distinct objects — both for the ConfigObj itself
    # and for its plain-dict export.
    assert source == cfg
    assert source is not cfg
    assert source == cfg.dict()
    assert source is not cfg.dict()
def read(config_file, configspec, server_mode=False, default_section='default_settings', list_values=True):
    '''
    Read the config file with spec validation.

    Missing per-section values fall back to the defaults section, which is
    then removed from the returned dict.  Raises ConfiguratorException when
    validation fails.
    '''
    spec_path = path.join(path.abspath(path.dirname(__file__)), configspec)
    parsed = ConfigObj(config_file,
                       configspec=spec_path,
                       list_values=list_values)
    validation = parsed.validate(validate.Validator(), preserve_errors=True)
    # Guard clause: bail out immediately on any validation problem.
    if validation != True:
        raise ConfiguratorException(config_file, validation)

    config = dict(parsed)
    for section in config:
        if section == default_section:
            continue
        if server_mode:
            # Servers config file: mark available and fill in the fqdn.
            config[section]['availability'] = True
            if config[section]['custom_fqdn'] is None:
                config[section]['custom_fqdn'] = socket.getfqdn()
        # Retrieve default configuration for missing values.
        for option in config[section]:
            if config[section][option] is None:
                config[section][option] = config[default_section][option]
    del config[default_section]
    return config
def test_behavior_when_list_values_is_false():
    """With list_values=False, quoted/comma values are kept verbatim on
    parse and written back with triple-quoting only when required."""
    raw = '''
key1 = no quotes
key2 = 'single quotes'
key3 = "double quotes"
key4 = "list", 'with', several, "quotes"
'''
    parsed = ConfigObj(raw.splitlines(), list_values=False)
    assert parsed == {
        'key1': 'no quotes',
        'key2': "'single quotes'",
        'key3': '"double quotes"',
        'key4': '"list", \'with\', several, "quotes"'
    }

    built = ConfigObj(list_values=False)
    built['key1'] = 'Multiline\nValue'
    built['key2'] = '''"Value" with 'quotes' !'''
    assert built.write() == [
        "key1 = '''Multiline\nValue'''",
        'key2 = "Value" with \'quotes\' !'
    ]

    # Re-enabling list_values forces quoting of the mixed-quote value.
    built.list_values = True
    assert built.write() == [
        "key1 = '''Multiline\nValue'''",
        'key2 = \'\'\'"Value" with \'quotes\' !\'\'\''
    ]
def _create_area(area_id, area_content):
    """Parse area configuration and return a pyresample AreaDefinition."""
    # Strip the C-style braces so the body becomes plain "key: value" lines,
    # then convert the first ':' of each line to '=' for ConfigObj.
    config_obj = area_content.replace('{', '').replace('};', '')
    config_obj = ConfigObj([line.replace(':', '=', 1)
                            for line in config_obj.splitlines()])
    config = config_obj.dict()
    config['REGION'] = area_id

    # basestring exists only on Python 2; fall back to str on Python 3.
    try:
        string_types = basestring
    except NameError:
        string_types = str
    # A NAME containing commas parses as a list: re-join it into one string.
    if not isinstance(config['NAME'], string_types):
        config['NAME'] = ', '.join(config['NAME'])

    config['XSIZE'] = int(config['XSIZE'])
    config['YSIZE'] = int(config['YSIZE'])
    # AREA_EXTENT was written as a parenthesized tuple: strip the parens
    # from the first/last elements, then convert all four to float.
    config['AREA_EXTENT'][0] = config['AREA_EXTENT'][0].replace('(', '')
    config['AREA_EXTENT'][3] = config['AREA_EXTENT'][3].replace(')', '')

    for i, val in enumerate(config['AREA_EXTENT']):
        config['AREA_EXTENT'][i] = float(val)

    config['PCS_DEF'] = _get_proj4_args(config['PCS_DEF'])
    return pr.geometry.AreaDefinition(config['REGION'], config['NAME'],
                                      config['PCS_ID'], config['PCS_DEF'],
                                      config['XSIZE'], config['YSIZE'],
                                      config['AREA_EXTENT'])
def configure_locale():
    """Ensure the process runs under a usable locale, loading one from
    /etc/default/locale when none is set, and abort unless the resulting
    encoding is UTF-8."""
    logger.debug("Before %s", locale.nl_langinfo(locale.CODESET))
    current_locale = locale.getlocale()

    if current_locale[1] is None:
        logger.debug("No locale currently set. Attempting to get default locale.")
        default_locale = locale.getdefaultlocale()

        if default_locale[1] is None:
            logger.debug("No default locale exists. Let's try loading from /etc/default/locale")
            if os.path.exists("/etc/default/locale"):
                # Read the system-wide LANG setting.
                locale_config = ConfigObj("/etc/default/locale")
                lang = locale_config.get("LANG")
                new_locale = lang
            else:
                logger.error(
                    "/etc/default/locale could not be found! Please run 'sudo update-locale' from command-line."
                )
                sys.exit(1)
        else:
            new_locale = default_locale

        logger.info("New locale set to: %s",
                    locale.setlocale(locale.LC_ALL, new_locale))

    # Python 2 only: reload() re-exposes sys.setdefaultencoding, which the
    # interpreter removes at startup. Deliberate (if hacky) UTF-8 forcing.
    reload(sys)
    sys.setdefaultencoding("UTF-8")

    current_locale_encoding = locale.getlocale()[1].lower()
    logger.debug("sys default encoding %s", sys.getdefaultencoding())
    logger.debug("After %s", locale.nl_langinfo(locale.CODESET))

    # A non-UTF-8 locale is fatal for downstream text handling.
    if current_locale_encoding not in ["utf-8", "utf8"]:
        logger.error("Need a UTF-8 locale. Currently '%s'. Exiting..." % current_locale_encoding)
        sys.exit(1)
def main():
    """Read Pandora credentials (and optional proxy settings) from the XDG
    config file, start the websocket and MPD client factories, and run the
    twisted reactor."""
    log.startLogging(sys.stderr)

    conf_file = os.path.join(
        xdg.BaseDirectory.save_config_path('socialsound'), 'config.ini')
    cfg = ConfigObj(conf_file)
    username = cfg['Pandora']['username']
    password = cfg['Pandora']['password']

    # Proxy settings are optional; pass through only the values present.
    proxy_section = cfg.get('Proxy', {})
    host = proxy_section.get('host', None)
    port = proxy_section.get('port', None)
    proxy_kwargs = {}
    if host:
        proxy_kwargs['proxy_host'] = host
    if port:
        proxy_kwargs['proxy_port'] = port

    # Start websocket listener.
    factory = PandoraFactory("ws://localhost:9000", debug=False,
                             username=username, password=password,
                             **proxy_kwargs)
    factory.protocol = PandoraProtocol
    listenWS(factory)

    # Start the MPD client factory.
    factory.player = PlayerFactory()
    reactor.connectTCP(MPD_HOST, MPD_PORT, factory.player)
    reactor.run()
def read_my_cnf_files(self, files, keys):
    """
    Reads a list of config files and merges them. The last one will win.

    :param files: list of files to read
    :param keys: list of keys to retrieve
    :returns: dict mapping each key to its value, with None for missing keys
    """
    cnf = ConfigObj()
    for _file in files:
        try:
            cnf.merge(ConfigObj(os.path.expanduser(_file), interpolation=False))
        except ConfigObjError as e:
            # Keep whatever parsed before the error instead of dropping
            # the whole file.  (FIX: removed the dead `pass` that followed
            # these statements in the original.)
            self.logger.error("Error parsing %r.", _file)
            self.logger.error("Recovering partially parsed config values.")
            cnf.merge(e.config)

    # [client] always applies; [client<suffix>] overrides it when a
    # defaults-group-suffix is configured.
    sections = ["client"]
    if self.defaults_suffix:
        sections.append("client{0}".format(self.defaults_suffix))

    def get(key):
        # Later sections win, so iterate in order and keep overwriting.
        result = None
        for sect in sections:
            if sect in cnf and key in cnf[sect]:
                result = cnf[sect][key]
        return result

    return dict([(x, get(x)) for x in keys])
def send_email(subject, body_text, to_emails, cc_emails, bcc_emails): base_path = os.path.dirname(os.path.abspath(__file__)) config_path = os.path.join(base_path, "config.ini") if os.path.exists(config_path): cfg = ConfigObj(config_path) cfg_dict = cfg.dict() else: print "Config not found! Exiting!" sys.exit(1) host = cfg_dict["server"] from_addr = cfg_dict["from_addr"] username = cfg_dict["username"] password = cfg_dict["password"] BODY = string.join(( "From: %s" % from_addr, "To: %s" % ', '.join(to_emails), "CC: %s" % ', '.join(cc_emails), "BCC: %s" % ', '.join(bcc_emails), "Subject: %s" % subject , "", body_text ), "\r\n") emails = to_emails + cc_emails + bcc_emails server = smtplib.SMTP(host) server.starttls() server.login(username, password) server.sendmail(from_addr, receivers, BODY) server.quit()
def save(self, filename):
    """Write the importer configuration to *filename*, or to the default
    config file when *filename* is empty."""
    out = ConfigObj()
    out.filename = filename if len(filename) > 0 else self.configFile

    out['server'] = {'url': self.importServerUrl}
    out['file'] = {
        'delimiter': self.csvDelimiter,
        'enclosure': self.csvEnclosure,
        'batchsize': self.batchSize,
        'default_category': self.defaultCategory,
        'user_per_batch': self.usersPerBatch,
    }
    out['paths'] = {
        'queue': self.queuePath,
        'processed': self.processedPath,
        'names': self.namesPath,
    }
    # Source-specific section keyed by the configured source name.
    out[self.sourceName] = {
        'banned': self.bannedTerms,
        'categories': self.categoryMapping,
    }
    out.write()
def writeDataStore(self):
    """Persist the transceiver serial number into the [DataStore] section
    of /etc/WV5Datastore.cfg."""
    path = "/etc/WV5Datastore.cfg"
    config = ConfigObj(path)
    config.filename = path
    config["DataStore"] = {"TransceiverSerNo": self.TransceiverSerNo}
    config.write()
def setTransmissionFrequency(self, val):
    """Persist *val* as the transmission frequency in the
    [TransceiverSettings] section of /etc/WV5Datastore.cfg."""
    path = "/etc/WV5Datastore.cfg"
    config = ConfigObj(path)
    config.filename = path
    config["TransceiverSettings"] = {"TransmissionFrequency": val}
    config.write()
def save(self):
    """Save cached data

    Replace data file with current object contents.  Raises SNMPError on
    any OS/IO failure while writing.
    """
    if self.path is None:
        # No target path configured: nothing to persist.
        return

    c = ConfigObj()
    for key, opts in self.items():
        if len(opts) == 0:
            continue
        if key not in c:
            c[key] = {}
        for i, data in opts.items():
            # ConfigObj keys must be strings.
            c[key][str(i)] = data

    # BUG FIX: the file object was opened inline and never closed; `with`
    # guarantees the handle is released even when write() fails.  IOError
    # is an alias of OSError on Python 3, so one clause covers both.
    try:
        with open(self.path, 'w') as handle:
            c.write(handle)
    except (IOError, OSError) as e:
        raise SNMPError('Error writing index cache {0}: {1}'.format(self.path, e))
def save_experiment(self, path, experiment):
    """Serialize the experiment's member preferences to *path* as a
    UTF-8 encoded config file."""
    preferences = experiment.preferences_from_members()
    conf = ConfigObj(preferences, encoding='utf8')
    with open(path, 'wb') as out:
        conf.write(out)
def create_default_config(configspec, includeComments=False):
    """Write a default config.ini under ~/.shakemap generated from
    *configspec* and return its path.

    :param configspec: configspec passed to ConfigObj
    :param includeComments: when False, comment lines are stripped
    :returns: path of the written config file
    """
    configfolder = os.path.join(os.path.expanduser('~'), '.shakemap')
    if not os.path.isdir(configfolder):
        os.mkdir(configfolder)
    outfile = os.path.join(configfolder, 'config.ini')
    config = ConfigObj(configspec=configspec, stringify=False)
    validator = get_custom_validator()
    # copy=True materializes the spec defaults into the config.
    config.validate(validator, copy=True)
    lines = config.write()
    # TODO - should we write out the docs for the parameters that have
    # default=None?
    # FIX: use a context manager so the file is closed even if a write fails.
    with open(outfile, 'wt') as f:
        for line in lines:
            if line.strip().startswith('#') and not includeComments:
                continue
            parts = line.split('=')
            # this is a hack because I can't figure out what to do with floats
            # with default value of None: tried stringify=False/True, still
            # get errors when trying to validate, so skip empty-string values.
            if len(parts) > 1 and parts[1].strip() == '""':
                continue
            if not len(line.strip()):
                continue
            f.write(line + '\n')
    return outfile
def default_config():
    """Build a ConfigObj populated purely from the defaults in
    config_spec_text; return it when validation succeeds, else None."""
    spec = ConfigObj(config_spec_text.splitlines(), list_values=False)
    cfg = ConfigObj(configspec=spec, stringify=True, list_values=True)
    # copy=True pulls the spec defaults into the config itself.
    if cfg.validate(Validator(), copy=True):
        return cfg
def test_with_default(self):
    """pop() returns stored values, honors defaults, and raises KeyError
    for a missing key with no default."""
    cfg = ConfigObj()
    cfg['a'] = 3
    self.assertEqual(cfg.pop('a'), 3)
    self.assertEqual(cfg.pop('b', 3), 3)
    self.assertRaises(KeyError, cfg.pop, 'c')
def test_no_parent(tmpdir, specpath):
    """A sub-section marker with no enclosing section must raise
    NestingError."""
    ini = tmpdir.join('config.ini')
    ini.write('[[haha]]')
    with pytest.raises(NestingError):
        # Constructing the object is the act under test; the unused local
        # binding (`conf = ...`) in the original was dead weight.
        ConfigObj(str(ini), configspec=specpath, file_error=True)
# set to true to mount a block device (eg a USB stick) for writing data mount = boolean(default=False) # device to be mounted device = string(default=/dev/sda1) # the mount point mntpnt = string(default=/mnt) [coap] # The server address to listen on, can be an IP address or resolvable host name. By default listen on all interfaces address = string(default="::") # The port to listen on. By default use the CoAP port 5683 port = integer(default=5683) """ # populate the default server config object which is used as a validator piccoloServerDefaults = ConfigObj(defaultCfgStr.split('\n')) validator = Validator() class PiccoloServerConfig(object): """object managing the piccolo server configuration""" def __init__(self): self._cfg = ConfigObj(configspec=piccoloServerDefaults) self._cfg.validate(validator) parser = ArgumentParser() parser.add_argument('-s', '--server-configuration', metavar='CFG', help="read configuration from CFG") parser.add_argument('-d',
def __init__(self, ini):
    """Load the run configuration from *ini* (ConfigObj format) and expose
    every setting as an attribute, resolving data paths against the
    configured input/output folders."""
    config = ConfigObj(ini)

    # project level params
    self.ProjectName = config['Project']['ProjectName']
    self.InputFolder = config['Project']['InputFolder']
    # Outputs go into a per-project subfolder.
    self.OutputFolder = os.path.join(config['Project']['OutputFolder'],
                                     self.ProjectName)
    self.rgnmapdir = config['Project']['rgnmapdir']
    self.OutputFormat = int(config['Project']['OutputFormat'])
    self.OutputUnit = int(config['Project']['OutputUnit'])
    self.PerformDiagnostics = int(config['Project']['PerformDiagnostics'])
    self.PerformTemporal = int(config['Project']['PerformTemporal'])

    try:
        self.Logger = config['Logger']
        self.Logger['logdir'] = self.OutputFolder
    except KeyError:
        # No logger configuration. Supply a default one for backward
        # compatibility with old config files.
        self.Logger = {'logdir': 'logs', 'filename': 'mainlog.txt'}

    # spatial params (default 0.5 degree when absent or unparseable)
    try:
        self.SpatialResolution = float(
            config['Project']['SpatialResolution'])
    except:
        self.SpatialResolution = 0.5
    # Global grid dimensions (rows, cols) at the chosen resolution.
    self.mapsize = [
        int(180 / self.SpatialResolution),
        int(360 / self.SpatialResolution)
    ]

    # GCAM access settings
    self.GCAM_DBpath = os.path.join(self.InputFolder,
                                    config['GCAM']['GCAM_DBpath'])
    self.GCAM_DBfile = config['GCAM']['GCAM_DBfile']
    self.GCAM_query = os.path.join(self.GCAM_DBpath,
                                   config['GCAM']['GCAM_query'])
    self.subreg = int(config['GCAM']['GCAM_subreg'])
    self.years = config['GCAM']['GCAM_Years']

    # reference data (all paths relative to the input folder)
    self.Area = os.path.join(self.InputFolder,
                             config['GriddedMap']['Area'])
    self.Coord = os.path.join(self.InputFolder,
                              config['GriddedMap']['Coord'])
    self.aez = os.path.join(self.InputFolder, config['GriddedMap']['AEZ'])
    self.InputBasinFile = os.path.join(self.InputFolder,
                                       config['GriddedMap']['BasinIDs'])
    self.BasinNames = os.path.join(self.InputFolder,
                                   config['GriddedMap']['BasinNames'])
    self.gcam_basin_lu = os.path.join(
        self.InputFolder, config['GriddedMap']['GCAM_Basin_Key'])
    self.InputRegionFile = os.path.join(self.InputFolder,
                                        config['GriddedMap']['RegionIDs'])
    self.RegionNames = os.path.join(self.InputFolder,
                                    config['GriddedMap']['RegionNames'])
    self.InputCountryFile = os.path.join(
        self.InputFolder, config['GriddedMap']['CountryIDs'])
    self.CountryNames = os.path.join(self.InputFolder,
                                     config['GriddedMap']['CountryNames'])
    self.Population_GPW = os.path.join(
        self.InputFolder, config['GriddedMap']['Population_GPW'])
    self.Population_HYDE = os.path.join(
        self.InputFolder, config['GriddedMap']['Population_HYDE'])
    self.Irrigation_GMIA = os.path.join(
        self.InputFolder, config['GriddedMap']['Irrigation_GMIA'])
    self.Irrigation_HYDE = os.path.join(
        self.InputFolder, config['GriddedMap']['Irrigation_HYDE'])
    self.Livestock_Buffalo = os.path.join(
        self.InputFolder, config['GriddedMap']['Livestock_Buffalo'])
    self.Livestock_Cattle = os.path.join(
        self.InputFolder, config['GriddedMap']['Livestock_Cattle'])
    self.Livestock_Goat = os.path.join(
        self.InputFolder, config['GriddedMap']['Livestock_Goat'])
    self.Livestock_Sheep = os.path.join(
        self.InputFolder, config['GriddedMap']['Livestock_Sheep'])
    self.Livestock_Poultry = os.path.join(
        self.InputFolder, config['GriddedMap']['Livestock_Poultry'])
    self.Livestock_Pig = os.path.join(
        self.InputFolder, config['GriddedMap']['Livestock_Pig'])
    self.buff_fract = os.path.join(
        self.InputFolder, config['GriddedMap']['Buffalo_Fraction'])
    self.goat_fract = os.path.join(self.InputFolder,
                                   config['GriddedMap']['Goat_Fraction'])
    self.irrigated_fract = os.path.join(
        self.InputFolder, config['GriddedMap']['Irrigated_Fract'])

    # Temporal downscaling inputs are only required when enabled.
    if self.PerformTemporal:
        self.TempMonthlyFile = config['TemporalDownscaling'][
            'Temp_Monthly']
        self.HDDCDDMonthlyFile = config['TemporalDownscaling'][
            'HDD_CDD_Monthly']
        self.Domestic_R = config['TemporalDownscaling']['Domestic_R']
        self.Elec_Building = config['TemporalDownscaling']['Elec_Building']
        self.Elec_Industry = config['TemporalDownscaling']['Elec_Industry']
        self.Elec_Building_heat = config['TemporalDownscaling'][
            'Elec_Building_heat']
        self.Elec_Building_cool = config['TemporalDownscaling'][
            'Elec_Building_cool']
        self.Elec_Building_others = config['TemporalDownscaling'][
            'Elec_Building_others']
        self.Irr_MonthlyData = config['TemporalDownscaling'][
            'Irr_MonthlyData']
        self.TemporalInterpolation = int(
            config['TemporalDownscaling']['TemporalInterpolation'])

    # Validate that every referenced input file actually exists.
    self.check_existence()
timeSinceInput = boolean(default=false) vision = boolean(default=false) speech = boolean(default=false) speechManager = boolean(default=false) synthDriver = boolean(default=false) nvwave = boolean(default=false) [uwpOcr] language = string(default="") [upgrade] newLaptopKeyboardLayout = boolean(default=false) [editableText] caretMoveTimeoutMs = integer(min=0, max=2000, default=100) [development] enableScratchpadDir = boolean(default=false) [featureFlag] # 0:default, 1:yes, 2:no cancelExpiredFocusSpeech = integer(0, 2, default=0) """ #: The configuration specification #: @type: ConfigObj confspec = ConfigObj(StringIO(configSpecString), list_values=False, encoding="UTF-8") confspec.newlines = "\r\n"
sys.setdefaultencoding("UTF-8") current_locale_encoding = locale.getlocale()[1].lower() logger.debug("sys default encoding %s", sys.getdefaultencoding()) logger.debug("After %s", locale.nl_langinfo(locale.CODESET)) if current_locale_encoding not in ["utf-8", "utf8"]: logger.error("Need a UTF-8 locale. Currently '%s'. Exiting..." % current_locale_encoding) sys.exit(1) configure_locale() # loading config file try: config = ConfigObj("/etc/airtime/airtime.conf") except Exception as e: logger.error("Error loading config file: %s", e) sys.exit(1) class Global: def __init__(self, api_client): self.api_client = api_client def selfcheck(self): return self.api_client.is_server_compatible() def test_api(self): self.api_client.test()
import os
import shutil
import sys
from configobj import ConfigObj
import json
from distutils.util import strtobool

#path_model = './'
#path_model = sys.argv[1]
#print (sys.argv)
#os.chdir(path_model)

# Record the script start time for runtime reporting.
# NOTE(review): `datetime` must be imported above this chunk — confirm.
startTime = datetime.now()

# get runtime params from config file
config = ConfigObj('runtime_params.ini')
# strtobool accepts "true"/"false"/"yes"/"no"/"1"/"0"; bool() finishes the cast.
parallel_mode = bool(strtobool(config['parallel_mode']))
model_mode = config['model_mode']
short_test = int(config['short_test'])
print_log = bool(strtobool(config['print_log']))
seed = int(config['seed'])
# scenarios provide information on infrastructural plans
scenario_name = config[
    'scenario_name']
flow_input_type = config['flow_input_type']
flow_input_source = config['flow_input_source']
total_sensitivity_factors = int(config['total_sensitivity_factors'])
sensitivity_sample_file = config['sensitivity_sample_file']
output_list = config['output_list']
output_directory = config['output_directory']
clean_output = bool(strtobool(config['clean_output']))
save_full = bool(strtobool(config['save_full']))
def __get_value_from_settings(prop_label):
    """Return the value stored under *prop_label* in misc/settings.ini."""
    settings = ConfigObj('misc/settings.ini')
    return settings[prop_label]
def conf(inipath, specpath):
    """Build and return a ConfigObj for *inipath* using *specpath* as its
    configspec."""
    parsed = ConfigObj(inipath, configspec=specpath)
    return parsed
def get_config(config_path=None):
    """reads the config file, validates it and return a config dict

    :param config_path: path to a custom config file, if none is given the
                        default locations will be searched
    :type config_path: str
    :returns: configuration
    :rtype: dict
    :raises CannotParseConfigFileError: when the file cannot be parsed
    :raises InvalidSettingsError: when validation fails
    """
    if config_path is None:
        config_path = _find_configuration_file()
    logger.debug('using the config file at {}'.format(config_path))

    try:
        user_config = ConfigObj(config_path,
                                configspec=SPECPATH,
                                interpolation=False,
                                file_error=True,
                                )
    except ConfigObjError as error:
        logger.fatal('parsing the config file file with the following error: '
                     '{}'.format(error))
        logger.fatal('if you recently updated khal, the config file format '
                     'might have changed, in that case please consult the '
                     'CHANGELOG or other documentation')
        raise CannotParseConfigFileError()

    # Custom validation functions for khal-specific option types.
    fdict = {
        'timezone': is_timezone,
        'expand_path': expand_path,
        'expand_db_path': expand_db_path,
        'weeknumbers': weeknumber_option,
        'color': is_color,
    }
    validator = Validator(fdict)
    results = user_config.validate(validator, preserve_errors=True)

    # Report every validation error before aborting.
    abort = False
    for section, subsection, error in flatten_errors(user_config, results):
        abort = True
        if isinstance(error, Exception):
            logger.fatal(
                'config error:\n'
                'in [{}] {}: {}'.format(section[0], subsection, error))
        else:
            for key in error:
                if isinstance(error[key], Exception):
                    logger.fatal('config error:\nin {} {}: {}'.format(
                        sectionize(section + [subsection]),
                        key,
                        str(error[key]))
                    )

    if abort or not results:
        raise InvalidSettingsError()

    config_checks(user_config)

    # Warn (but do not fail) about unknown sections/keys.
    # FIX: logger.warn is a deprecated alias of logger.warning.
    extras = get_extra_values(user_config)
    for section, value in extras:
        if section == ():
            logger.warning('unknown section "{}" in config file'.format(value))
        else:
            section = sectionize(section)
            logger.warning('unknown key or subsection "{}" in '
                           'section "{}"'.format(value, section))
    return user_config
def test_invalid_lines_with_percents(tmpdir, specpath):
    """A value containing raw '%' escapes must raise ParseError."""
    ini = tmpdir.join('config.ini')
    ini.write('extra: %H:%M\n')
    with pytest.raises(ParseError):
        # Constructing the object is the act under test; the unused local
        # binding (`conf = ...`) in the original was dead weight.
        ConfigObj(str(ini), configspec=specpath, file_error=True)
from logHandler import logger logging = logger.getChild('core.config') import os import confspecs import paths import application from UserDict import UserDict from configobj import ConfigObj, ParseError from validate import Validator, VdtValueError configFile = paths.data_path(application.name + ".ini") confspec = ConfigObj(confspecs.defaults, list_values=False, encoding="UTF-8") confspec.newlines = "\r\n" conf = None class ConfigurationResetException(Exception): pass class Configuration(UserDict): def __init__(self, file=None, spec=None, *args, **kwargs): self.file = file self.spec = spec self.validator = Validator() self.setup_config(file=file, spec=spec) self.validated = self.config.validate(self.validator, copy=True) if self.validated: self.write() UserDict.__init__(self, self.config)
def test_init_config(self):
    """Loading the bot config file must not raise."""
    from configobj import ConfigObj
    # The constructed object itself is irrelevant; the original bound it to
    # an unused local, which was dead weight.
    ConfigObj('test/bot.conf')
def reloadConfig(self):
    """Re-read ./config/mitmf.conf into self.config; on failure, log the
    error and keep the previously loaded configuration (best effort)."""
    try:
        self.config = ConfigObj("./config/mitmf.conf")
    except Exception as e:
        # FIX: removed the dead `pass` that followed the log call — the
        # except block already has a statement, so it was unreachable noise.
        mitmf_logger.error("Error reloading config file: {}".format(e))
def test_configfile(self):
    """Smoke-test: the MITMf config file parses without raising."""
    from configobj import ConfigObj
    cfg = ConfigObj('config/mitmf.conf')
def parse_ini_races():
    """Import legacy EventScripts races from ``races.ini``.

    Parses each ``[<race>]`` section into an :class:`ImportedRace`,
    collecting race/skill aliases into the module-level ``_aliases`` dict
    and category display strings into ``categories_strings``.

    :returns: mapping of normalized race name -> ImportedRace settings
    :rtype: OrderedDict
    """
    races = OrderedDict()
    if (CFG_PATH / 'races.ini').isfile():
        imported = ConfigObj(CFG_PATH / 'races.ini')
        no_category = []
        for name, data in imported.items():
            # Top-level racealias_* entries become global aliases.
            for alias, value in data.items():
                if alias.startswith('racealias_'):
                    _aliases[alias] = value
            fixed_name = FIX_NAME.sub('', name.lower().replace(' ', '_'))
            settings = races[fixed_name] = ImportedRace(fixed_name, ModuleType.ESS_INI)
            # Race-level commands (changeintocmd has no INI equivalent).
            settings.cmds['preloadcmd'] = data['preloadcmd']
            settings.cmds['roundstartcmd'] = data['roundstartcmd']
            settings.cmds['roundendcmd'] = data['roundendcmd']
            settings.cmds['spawncmd'] = data['spawncmd']
            settings.cmds['deathcmd'] = data['deathcmd']
            settings.cmds['changeintocmd'] = None
            settings.cmds['changefromcmd'] = data['onchange']
            settings.config['required'] = int(data['required'])
            settings.config['maximum'] = int(data['maximum'])
            # Pipe-separated restriction lists; empty string -> empty list.
            settings.config['restrictmap'] = data['restrictmap'].split('|') if data['restrictmap'] else []
            settings.config['restrictitem'] = data['restrictitem'].split('|') if data['restrictitem'] else []
            settings.config['restrictweapon'] = []
            settings.config['restrictteam'] = int(data['restrictteam'])
            settings.config['teamlimit'] = int(data.get('teamlimit', 0))
            settings.config['author'] = data['author']
            settings.config['allowonly'] = data['allowonly'].split('|') if data['allowonly'] else []
            skillnames = data['skillnames'].split('|')
            skilldescr = data['skilldescr'].split('|')
            skillcfg = data['skillcfg'].split('|')
            skillneeded = data['skillneeded'].split('|')
            # FIX: wrap map() in list() — on Python 3 (this file uses
            # f-strings) map() returns an iterator, but numberoflevels is
            # indexed and multiplied below; the cooldown parsing further down
            # already uses list(map(...)) for the same reason.
            numberoflevels = list(map(int, data['numberoflevels'].split('|'))) if '|' in data['numberoflevels'] else [int(data['numberoflevels'])] * len(skillnames)
            skills = settings.config['skills'] = {}
            for i, skill_name in enumerate(skillnames):
                fixed_skill_name = FIX_NAME.sub('', skill_name.lower().replace(' ', '_'))
                settings.strings[fixed_skill_name] = _LanguageString(skill_name)
                settings.strings[f'{fixed_skill_name} description'] = _LanguageString(skilldescr[i].replace(r'\n', ''))
                skill = skills[fixed_skill_name] = {}
                skill['event'] = [skillcfg[i]]
                skill['required'] = [int(skillneeded[i])] * numberoflevels[i]
                if 'cooldown' in data[f'skill{i + 1}']:
                    # Cooldowns may be ints or floats; pad to one per level.
                    skill['cooldown'] = list(map(lambda x: float(x) if '.' in x else int(x), data[f'skill{i + 1}']['cooldown'].split('|')))
                    if len(skill['cooldown']) != numberoflevels[i]:
                        skill['cooldown'] = [skill['cooldown'][0]] * numberoflevels[i]
                skill['variables'] = {}
                skill['cmds'] = {}
                skill['cmds']['setting'] = data[f'skill{i + 1}']['setting'].split('|')
                if 'block' in data[f'skill{i + 1}']:
                    skill['cmds']['cmd'] = 'es_xdoblock ' + data[f'skill{i + 1}']['block']
                else:
                    skill['cmds']['cmd'] = data[f'skill{i + 1}']['cmd']
                skill['cmds']['sfx'] = data[f'skill{i + 1}']['sfx']
                # Level cap: number of per-level settings, else level count.
                count = len(data[f'skill{i + 1}']['setting'].split('|'))
                if count:
                    skill['maximum'] = count
                else:
                    skill['maximum'] = numberoflevels[i]
                # Skill-level racealias_* entries also become global aliases.
                for alias, value in data[f'skill{i + 1}'].items():
                    if alias.startswith('racealias_'):
                        _aliases[alias] = value
            settings.strings['name'] = _LanguageString(name)
            settings.strings['description'] = _LanguageString(data['desc'].replace(r'\n', ''))
            # '0' (or missing) means the race has no category.
            categories = (data['category'].split('|') if data['category'] and not data['category'] == '0' else []) if 'category' in data else []
            if categories:
                for category in categories:
                    if category == '0':
                        no_category.append(settings)
                        continue
                    fixed_category = FIX_NAME.sub('', category.lower().replace(' ', '_'))
                    if fixed_category not in categories_strings:
                        categories_strings[fixed_category] = _LanguageString(category)
                    settings.add_to_category(fixed_category)
            else:
                no_category.append(settings)
        # Races without a category end up in the default (None) category.
        for settings in no_category:
            settings.add_to_category(None)
    return races
def write_conf(conf_dict, conf_file):
    """Dump ``conf_dict`` to ``conf_file`` in INI format via ConfigObj."""
    output = ConfigObj()
    output.filename = conf_file
    for key, value in conf_dict.items():
        output[key] = value
    output.write()
def initialize(file=None, logging_level='INFO'):
    """Read the configuration file containing the run's parameters.

    This should be the first call of most (all?) OGGM simulations.

    Parameters
    ----------
    file : str
        path to the configuration file (default: OGGM params.cfg)
    logging_level : str
        set a logging level. See :func:`set_logging_config` for options.
    """
    global IS_INITIALIZED
    global PARAMS
    global PATHS
    set_logging_config(logging_level=logging_level)
    # Default to the params.cfg shipped next to this module.
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')
    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        log.critical('Config file could not be parsed (%s): %s', file, e)
        sys.exit()
    log.workflow('Using configuration file: %s', file)
    # Paths
    oggm_static_paths()
    PATHS['working_dir'] = cp['working_dir']
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']
    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')
    # Some non-trivial params (explicitly typed via as_bool/as_int;
    # everything not popped below is treated as a float at the end)
    PARAMS['continue_on_error'] = cp.as_bool('continue_on_error')
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
    PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')
    PARAMS['correct_for_neg_flux'] = cp.as_bool('correct_for_neg_flux')
    PARAMS['filter_for_neg_flux'] = cp.as_bool('filter_for_neg_flux')
    PARAMS['run_mb_calibration'] = cp.as_bool('run_mb_calibration')
    PARAMS['rgi_version'] = cp['rgi_version']
    PARAMS['use_rgi_area'] = cp.as_bool('use_rgi_area')
    PARAMS['compress_climate_netcdf'] = cp.as_bool('compress_climate_netcdf')
    PARAMS['use_tar_shapefiles'] = cp.as_bool('use_tar_shapefiles')
    PARAMS['clip_mu_star'] = cp.as_bool('clip_mu_star')
    # Climate
    PARAMS['baseline_climate'] = cp['baseline_climate'].strip().upper()
    PARAMS['baseline_y0'] = cp.as_int('baseline_y0')
    PARAMS['baseline_y1'] = cp.as_int('baseline_y1')
    PARAMS['hydro_month_nh'] = cp.as_int('hydro_month_nh')
    PARAMS['hydro_month_sh'] = cp.as_int('hydro_month_sh')
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    PARAMS['tstar_search_glacierwide'] = cp.as_bool('tstar_search_glacierwide')
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')
    # Inversion
    k = 'use_shape_factor_for_inversion'
    PARAMS[k] = cp[k]
    # Flowline model
    k = 'use_shape_factor_for_fluxbasedmodel'
    PARAMS[k] = cp[k]
    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files, get_demo_file
    download_oggm_files()
    # Delete non-floats: keys already consumed above (with their proper
    # types) are removed so the catch-all float loop below does not choke.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'use_tar_shapefiles',
           'grid_dx_method', 'run_mb_calibration', 'compress_climate_netcdf',
           'mp_processes', 'use_multiprocessing', 'baseline_y0', 'baseline_y1',
           'temp_use_local_gradient', 'temp_local_gradient_bounds',
           'topo_interp', 'use_compression', 'bed_shape', 'continue_on_error',
           'use_multiple_flowlines', 'tstar_search_glacierwide',
           'mpi_recv_buf_size', 'hydro_month_nh', 'clip_mu_star',
           'tstar_search_window', 'use_bias_for_run', 'hydro_month_sh',
           'use_intersects', 'filter_min_slope', 'auto_skip_task',
           'correct_for_neg_flux', 'filter_for_neg_flux', 'rgi_version',
           'use_shape_factor_for_inversion', 'use_rgi_area',
           'use_shape_factor_for_fluxbasedmodel', 'baseline_climate']
    for k in ltr:
        cp.pop(k, None)
    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)
    # Read-in the reference t* data - maybe it will be used, maybe not
    fns = ['ref_tstars_rgi5_cru4', 'ref_tstars_rgi6_cru4',
           'ref_tstars_rgi5_histalp', 'ref_tstars_rgi6_histalp']
    for fn in fns:
        PARAMS[fn] = pd.read_csv(get_demo_file('oggm_' + fn + '.csv'))
        fpath = get_demo_file('oggm_' + fn + '_calib_params.json')
        with open(fpath, 'r') as fp:
            mbpar = json.load(fp)
        PARAMS[fn + '_calib_params'] = mbpar
    # Empty defaults
    set_intersects_db()
    IS_INITIALIZED = True
    # Pre extract cru cl to avoid problems by multiproc
    from oggm.utils import get_cru_cl_file
    get_cru_cl_file()
sgroup = parser.add_argument_group("%s" % p.name, p.desc) sgroup.add_argument("--%s" % p.optname, action="store_true", help="Load plugin %s" % p.name) if p.has_opts: p.add_options(sgroup) except NotImplementedError: sys.exit("[-] %s plugin claimed option support, but didn't have it." % p.name) args = parser.parse_args() try: configfile = ConfigObj(args.configfile) except Exception, e: sys.exit("[-] Error parsing config file: " + str(e)) config_args = configfile['MITMf']['args'] if config_args: print "[*] Loading arguments from config file" for arg in config_args.split(' '): sys.argv.append(arg) args = parser.parse_args() #################################################################################################### # Here we check for some variables that are very commonly used, and pass them down to the plugins try: args.ip_address = get_if_addr(args.interface)
def _get_default_config():
    '''Create the default configuration object.

    Builds a ConfigObj holding global defaults (paths, matplotlib settings,
    sweep/example parameters) followed by one config dict per plotter class,
    each registered under the plotter's class name.
    '''
    _default_config = ConfigObj()
    _default_config.merge({
        'scale_factor': 1.,
        'iter_list': ['g_AMPA_total', 'g_GABA_total'],
        'output_dir': 'panels/',
        'grids_data_root': 'simulation_data/main_network/grids',
        'bump_data_root': 'simulation_data/main_network/gamma_bump',
        'vel_data_root': 'simulation_data/main_network/velocity',
        'const_pos_data_root': 'simulation_data/main_network/const_position',
        'singleDataRoot': 'simulation_data/main_network/single_neuron',
        'connection_data_root': 'simulation_data/main_network/connections',
        'even_shape': (31, 31),
        'noise_sigmas': [0, 150, 300],

        # Sections
        'mpl': {
            'font.size': 11,
            'pdf.fonttype': 42,
            'mathtext.default': 'regular',
            'font.sans-serif': ['Helvetica', 'Avant Garde',
                                'Computer Modern Sans serif'],
            'xtick.major.size': tick_len,
            'xtick.major.width': tick_width,
            'xtick.minor.size': tick_len / 2.,
            'xtick.minor.width': tick_width,
            'xtick.direction': 'out',
            'ytick.major.size': tick_len,
            'ytick.major.width': tick_width,
            'ytick.minor.size': tick_len / 2.,
            'ytick.minor.width': tick_width,
            'ytick.direction': 'out',
        },
        'sweeps': {
            'fig_size': (3.7, 2.6),         # inches
            'bbox': (0.08, 0.2, .72, .65),  # l, b, w, h
            'transparent': True,
            'grid_contours': [.5],
            'contours_kwargs': {
                'hold': True,
                'colors': 'k',
                'linewidths': [1.5]
            },
        },
        'grids': {
            'example_rc': ((5, 15), (15, 5)),
            'example_idx': [(5, 15), (5, 15), (5, 15)],  # (row, col)
            'ntrials': 3,
        },
        'gamma': {
            'example_rc': ((5, 15), (15, 5)),
        },
        'bumps': {
            'n_trials': 5,
        },
        'p_bumps': {
            'frac_total_text': 'P(bumps)'
        },
        'bump_sigma': {
            'sigma_bump_text': '$\sigma_{bump}^{-1}\ (neurons^{-1})$',
        },
        'seizures': {
            'thetaT': 125.,  # ms
            'sig_dt': .5     # ms
        },
        'vel_rasters': {
            'tLimits': [2e3, 3e3],  # ms
            'trialNum': 0,
            'ylabelPos': -0.22,
        },
    })

    ##########################################################################
    GridSweepsPlotter_config = {
        'cbar': [0, 0, 1],
        'cbar_kw': {
            'label': 'Gridness score',
            'location': 'right',
            'shrink': 0.8,
            'pad': -0.05,
            'ticks': ti.MultipleLocator(0.5),
            'rasterized': True
        },
        'sigma_title': True,
        'vmin': -0.5,
        'vmax': 1.111,
        'xlabel': [None, None, None],
        'xticks': [True, True, True],
        'ylabel': [None, '', ''],
        'yticks': [True, False, False],
        'ann': [
            dict(txt='b',
                 rc=_default_config['grids']['example_rc'][0],
                 xytext_offset=(1.5, 1),
                 color='black'),
            dict(txt='a',
                 rc=_default_config['grids']['example_rc'][1],
                 xytext_offset=(0.5, 1.5),
                 color='black')
        ],
        'plot_contours': [0, 0, 0],
    }
    _default_config['GridSweepsPlotter'] = GridSweepsPlotter_config

    ##########################################################################
    GridExamplesPlotter_config = {
        'fig_size': (1, 1.2),
        'ax_box': (0.01, 0.01, 0.99, 0.85),  # l, b, r, t
        'transparent': True,
        'population_type': 'E',
    }
    _default_config['GridExamplesPlotter'] = GridExamplesPlotter_config

    ##########################################################################
    GridExampleRectPlotter_config = {
        'cbar_kw': {
            'label': 'Gridness score',
            'location': 'right',
            'shrink': 0.8,
            'pad': -0.05,
            'ticks': ti.MultipleLocator(0.5),
            'rasterized': True
        },
        'vmin': -0.505,
        'vmax': 1.111,
        'fig_saver': PdfOutputSaver(None, 'pdf')
    }
    _default_config['GridExampleRectPlotter'] = GridExampleRectPlotter_config

    ##########################################################################
    SpatialInfoPlotter_config = {
        'cbar': [0, 0, 0],
        'cbar_kw': {
            'label': 'Information (bits/spike)',
            'location': 'right',
            'shrink': 0.8,
            'pad': -0.15,
            'ticks': ti.MultipleLocator(0.5),
            'rasterized': True
        },
        'sigma_title': True,
        'vmin': 0.14,
        'vmax': 2.66,
        'xlabel': ['', '', ''],
        'xticks': [False, False, False],
        'ylabel': [None, '', ''],
        'yticks': [True, False, False],
        'plot_contours': [1, 1, 1],
    }
    _default_config['SpatialInfoPlotter'] = SpatialInfoPlotter_config

    ##########################################################################
    SpatialSparsityPlotter_config = {
        'cbar': [0, 0, 0],
        'cbar_kw': {
            'label': 'Sparsity',
            'location': 'right',
            'shrink': 0.8,
            'pad': -0.15,
            'ticks': ti.MultipleLocator(0.2),
            'rasterized': True
        },
        'sigma_title': True,
        'vmin': 0.12,
        'vmax': 0.89,
        'xlabel': ['', '', ''],
        'xticks': [False, False, False],
        'ylabel': [None, '', ''],
        'yticks': [True, False, False],
        'plot_contours': [1, 1, 1],
    }
    _default_config['SpatialSparsityPlotter'] = SpatialSparsityPlotter_config

    ##########################################################################
    GridnessCorrelationPlotter_config = {
        'fig_size': (3.5, 1.5),
        'bbox_rect': (0.2, 0.35, 0.95, .95),
    }
    _default_config[
        'GridnessCorrelationPlotter'] = GridnessCorrelationPlotter_config

    ##########################################################################
    GridsDiffSweep_config = {
        'cbar_kw': dict(label='$\Delta_{150 - 0}$(Gridness score)',
                        location='right',
                        shrink=0.8,
                        pad=-0.05,
                        ticks=ti.MultipleLocator(0.5),
                        rasterized=True)
    }
    _default_config['GridsDiffSweep'] = GridsDiffSweep_config

    ##########################################################################
    GridDetailedNoisePlotter_config = {
        'legend': ['a', 'b'],
        'legend_kwargs': dict(
            loc=(0.8, 1),
            fontsize='small',
            frameon=False,
            numpoints=1,
            handletextpad=0.05,
        )
    }
    _default_config[
        'GridDetailedNoisePlotter'] = GridDetailedNoisePlotter_config

    ##########################################################################
    # NOTE(review): this block is a verbatim duplicate of the
    # GridDetailedNoisePlotter block directly above (present in the original
    # source); it is harmless but could probably be removed.
    GridDetailedNoisePlotter_config = {
        'legend': ['a', 'b'],
        'legend_kwargs': dict(
            loc=(0.8, 1),
            fontsize='small',
            frameon=False,
            numpoints=1,
            handletextpad=0.05,
        )
    }
    _default_config[
        'GridDetailedNoisePlotter'] = GridDetailedNoisePlotter_config

    ##########################################################################
    GammaDetailedNoisePlotter_config = {
        'legend': ['a', 'b'],
        'legend_kwargs': dict(
            loc=(0.85, 0.7),
            fontsize='small',
            frameon=False,
            numpoints=1,
            handletextpad=0.05,
        )
    }
    _default_config[
        'GammaDetailedNoisePlotter'] = GammaDetailedNoisePlotter_config

    ##########################################################################
    VmExamplesPlotter_config = {
        'fig_size': (2.5, 1.25),
        'ax_rect': (0.01, 0.01, 0.999, 0.6),  # l, b, r, t
    }
    _default_config['VmExamplesPlotter'] = VmExamplesPlotter_config

    ##########################################################################
    ConnectionFunctionPlotter_config = {
        'fig_size': (3, 1.5),
        'bbox_rect': (.2, .25, .95, .75),
        'uniform_random': False,
        'leg1_kwargs': dict(loc=(.6, .9),
                            frameon=False,
                            fontsize='x-small',
                            ncol=1),
        'leg2_kwargs': dict(loc=(0.45, 1.03),
                            frameon=False,
                            fontsize='x-small'),
    }
    _default_config[
        'ConnectionFunctionPlotter'] = ConnectionFunctionPlotter_config

    ##########################################################################
    GammaSweepsPlotter_config = {
        'scale_factor': .9,
        'cbar': [1, 0, 0],
        'cbar_kw': {  # This has to match cbar_kw-s below
            'location': 'left',
        },
        'AC_cbar_kw': dict(
            location='left',
            ticks=ti.MultipleLocator(0.3),
            fraction=0.25,
            shrink=0.8,
            pad=.2,
            labelpad=8,
            label='$1^{st}$ autocorrelation\npeak',
            rasterized=True,
        ),
        'AC_xticks': [False] * 3,
        'AC_yticks': [1, 0, 0],
        'AC_sigma_title': True,
        'AC_vmin': -0.09,
        'AC_vmax': 0.675,
        'F_cbar_kw': dict(location='left',
                          ticks=ti.MultipleLocator(30),
                          fraction=0.25,
                          shrink=0.8,
                          pad=.2,
                          labelpad=8,
                          label='Oscillation\nfrequency (Hz)',
                          extend='max',
                          extendfrac=0.1,
                          rasterized=True),
        'F_xticks': [True] * 3,
        'F_yticks': [1, 0, 0],
        'F_sigma_title': False,
        'F_vmin': 30,
        'F_vmax': 120,
        'ann': [
            dict(
                txt='b',
                rc=None,  # filled in below from gamma.example_rc
                xytext_offset=(1.5, 0),
                color='white',
            ),
            dict(
                txt='a',
                rc=None,  # filled in below from gamma.example_rc
                xytext_offset=(-.5, 2.),
                color='white',
            ),
        ],
        'plot_grid_contours': [0, 1, 0],
    }
    _default_config['GammaSweepsPlotter'] = GammaSweepsPlotter_config
    tmp = GammaSweepsPlotter_config
    # Patch the annotation positions with the gamma example coordinates and
    # give the frequency panel its own (deep) copy of the annotations.
    _default_config['GammaSweepsPlotter']['ann'][0]['rc'] = \
        _default_config['gamma']['example_rc'][0]
    _default_config['GammaSweepsPlotter']['ann'][1]['rc'] = \
        _default_config['gamma']['example_rc'][1]
    _default_config['GammaSweepsPlotter'].update({
        'annF': deepcopy(tmp['ann']),
    })

    ##########################################################################
    GammaExamplePlotter_config = {
        # index0: noise_sigma
        # index1: example index
        'xscales': [
            [0, 0, 0],
            [0, 0, 1],
        ],
        'sigma_titles': [
            [0, 0, 0],
            [1, 1, 1],
        ],
        'xscale_kw': dict(scaleLen=50, x=0.75, y=-0.07, size='x-small'),
        'yscale_kw': [[
            dict(scaleLen=5, unitsText='nA', x=.5, y=.1, size='x-small'),
            dict(scaleLen=0.5, unitsText='nA', x=.5, y=.05, size='x-small'),
            dict(scaleLen=0.5, unitsText='nA', x=.5, y=.05, size='x-small')
        ], [
            dict(scaleLen=5, unitsText='nA', x=.5, y=.1, size='x-small'),
            dict(scaleLen=0.5, unitsText='nA', x=.5, y=.05, size='x-small'),
            dict(scaleLen=0.5, unitsText='nA', x=.55, y=0, size='x-small')
        ]],
    }
    _default_config['GammaExamplePlotter'] = GammaExamplePlotter_config

    ##########################################################################
    GammaScatterAllPlotter_config = {
        'fig_size': (4.2, 2),
        'dot_size': 6,
        'legend_kwargs': dict(loc=(0.9, 0.4),
                              fontsize='small',
                              frameon=False,
                              numpoints=1,
                              title='$\sigma$ (pA)'),
        'bbox_rect': (.1, .35, .95, .85),
        'ylabel': '',
    }
    _default_config['GammaScatterAllPlotter'] = GammaScatterAllPlotter_config

    ##########################################################################
    GammaFreqGridsScatterAllPlotter_config = {
        'fig_size': (4.2, 2),
        'dot_size': 6,
        'legend_kwargs': dict(loc=(0.8, 0.4),
                              fontsize='small',
                              frameon=False,
                              numpoints=1,
                              title='$\sigma$ (pA)'),
        'bbox_rect': (.1, .35, .95, .85),
        'ylabel': '',
        'yticks': True,
    }
    _default_config[
        'GammaFreqGridsScatterAllPlotter'] = GammaFreqGridsScatterAllPlotter_config

    ##########################################################################
    GammaScatterPBumpsAllPlotter_config = {
        'fig_size': (4.5, 2.6),
        'bbox_rect': (0.3, 0.22, 0.82, 0.95),
        'xlabel': '',
        'legend_kwargs': dict(loc=(1.05, 0.5),
                              fontsize='small',
                              frameon=False,
                              title='$\sigma$ (pA)'),
    }
    _default_config[
        'GammaScatterPBumpsAllPlotter'] = GammaScatterPBumpsAllPlotter_config

    ##########################################################################
    GammaPBumpsProbabilityPlotter_config = {
        'fig_size': (2.7, 2.7),  # inches
        'bbox_rect': (0.25, 0.2, 0.95, 0.9),
    }
    _default_config[
        'GammaPBumpsProbabilityPlotter'] = GammaPBumpsProbabilityPlotter_config

    ##########################################################################
    GammaFreqPBumpsProbabilityPlotter_config = {
        'fig_size': (2.7, 2.7),  # inches
        'bbox_rect': (0.25, 0.2, 0.95, 0.9),
    }
    _default_config[
        'GammaFreqPBumpsProbabilityPlotter'] = GammaFreqPBumpsProbabilityPlotter_config

    ##########################################################################
    GammaGridsProbabilityPlotter_config = {
        'fig_size': (2.7, 2.7),  # inches
        'bbox_rect': (0.25, 0.2, 0.95, 0.9),
        'title_size': 'medium',
    }
    _default_config[
        'GammaGridsProbabilityPlotter'] = GammaGridsProbabilityPlotter_config

    ##########################################################################
    GammaFreqGridsProbabilityPlotter_config = {
        'fig_size': (2.7, 2.7),  # inches
        'bbox_rect': (0.25, 0.2, 0.95, 0.9),
        'title_size': 'x-small',
    }
    _default_config[
        'GammaFreqGridsProbabilityPlotter'] = GammaFreqGridsProbabilityPlotter_config

    ##########################################################################
    fracTotalText = _default_config['p_bumps']['frac_total_text']

    FracTotalSweepAnnPlotter_config = {
        'scale_factor': .8,
        'cbar': (1, 0, 0),
        'cbar_kw': dict(label=fracTotalText,
                        location='left',
                        shrink=0.8,
                        pad=0.25,
                        ticks=ti.MultipleLocator(0.5),
                        rasterized=True)
    }
    _default_config[
        'FracTotalSweepAnnPlotter'] = FracTotalSweepAnnPlotter_config

    ##########################################################################
    MainBumpFormationPlotter_config = {
        'scale_factor': 1.,
        'cbar': [0, 0, 1],
        'cbar_kw': dict(label="P(bumps)",
                        location='right',
                        shrink=0.8,
                        pad=-.05,
                        ticks=ti.MultipleLocator(0.5),
                        rasterized=True),
        'xticks': [True] * 3,
        'plot_grid_contours': [1, 1, 1],
    }
    _default_config[
        'MainBumpFormationPlotter'] = MainBumpFormationPlotter_config

    ##########################################################################
    # These two plotters share the FracTotalSweepAnnPlotter settings object.
    _default_config['MainIsBumpPlotter'] = FracTotalSweepAnnPlotter_config
    ##########################################################################
    _default_config['IsBumpPlotter'] = FracTotalSweepAnnPlotter_config

    ##########################################################################
    IsBumpExamplePlotter_config = {
        'bumpQualityX': -.9,
        'rateColors': ['k', 'k', 'k', 'k', 'k', 'k', 'k', 'k', 'k', 'k'],
        #'rateColors': ['k', 'yellow', 'yellow', 'yellow', 'yellow', 'yellow',
        #               'yellow', 'yellow', 'yellow', 'yellow'],
        'cbar_fig_size': (0.6, 0.6),
    }
    _default_config['IsBumpExamplePlotter'] = IsBumpExamplePlotter_config

    ##########################################################################
    MainScatterGridsBumpsPlotter_config = {
        'fig_size': (4.5, 2.6),
        'bbox_rect': (0.3, 0.22, 0.82, 0.95),
        'xlabel': '',
        'legend': False,
        'legend_kwargs': dict(loc=(1.05, 0.5),
                              fontsize='small',
                              frameon=False,
                              handletextpad=0,
                              title='$\sigma$ (pA)'),
    }
    _default_config[
        'MainScatterGridsBumpsPlotter'] = MainScatterGridsBumpsPlotter_config

    ##########################################################################
    BumpDriftAtTimePlotter_config = {
        'scale_factor': .8,
        'cbar_kw': dict(label='Average bump drift\n(neurons)',
                        location='right',
                        shrink=0.8,
                        pad=-0.05,
                        ticks=ti.MultipleLocator(10),
                        rasterized=True),
        'plot_grid_contours': [1, 1, 1],
    }
    _default_config['BumpDriftAtTimePlotter'] = BumpDriftAtTimePlotter_config

    ##########################################################################
    BumpDiffAtInitPlotter_config = {
        'cbar_kw': dict(label='Distance from init\nposition (neurons)',
                        location='right',
                        shrink=0.8,
                        pad=-0.05,
                        ticks=ti.MultipleLocator(10),
                        rasterized=True)
    }
    _default_config['BumpDiffAtInitPlotter'] = BumpDiffAtInitPlotter_config

    ##########################################################################
    BumpDiffResetPlotter_config = {
        'scale_factor': .8,
        'cbar_kw': dict(label='Distance from reset\nposition (neurons)',
                        location='right',
                        shrink=0.8,
                        pad=-0.05,
                        ticks=ti.MultipleLocator(5),
                        rasterized=True),
        'plot_grid_contours': [1, 1, 1],
    }
    _default_config['BumpDiffResetPlotter'] = BumpDiffResetPlotter_config

    ##########################################################################
    MaxPopulationFRSweepsPlotter_config = {
        'cbar': [1, 0, 0],
        'cbar_kw': dict(label="$E-rate_{max}$ (Hz)",
                        location='left',
                        shrink=0.8,
                        pad=0.25,
                        ticks=ti.MultipleLocator(100),
                        rasterized=True),
        'plot_grid_contours': [1, 1, 1],
        'grid_contours': [.5],
    }
    _default_config[
        'MaxPopulationFRSweepsPlotter'] = MaxPopulationFRSweepsPlotter_config

    ##########################################################################
    BumpSigmaSweepPlotter_config = {
        'cbar': [0, 0, 1],
        'cbar_kw': dict(label=_default_config['bump_sigma']['sigma_bump_text'],
                        location='right',
                        shrink=0.8,
                        pad=-0.05,
                        ticks=ti.MultipleLocator(0.2),
                        rasterized=True)
    }
    _default_config['BumpSigmaSweepPlotter'] = BumpSigmaSweepPlotter_config

    ##########################################################################
    BumpExamplePlotter_config = {
        'bbox': (0.01, 0.01, 0.99, 0.82),
    }
    _default_config['BumpExamplePlotter'] = BumpExamplePlotter_config

    ##########################################################################
    EIRasterPlotter_config = {
        'fig_size': (3, 1.9),
        'fig_ext': 'pdf',
        'yticks': [1, 0, 0],
        'ylabelPos': -0.35,
        'scaleBar': [None, None, 25],
        'scaleX': .85,
        'scaleY': -.1,
    }
    _default_config['EIRasterPlotter'] = EIRasterPlotter_config

    ##########################################################################
    EIRatePlotter_config = {
        'fig_size': (3, .65),
        'rateTop': .9,
        'ylabelPos': -0.35,
    }
    _default_config['EIRatePlotter'] = EIRatePlotter_config

    ##########################################################################
    MaxMeanThetaFRSweepPlotter_config = {
        'cbar_kw': dict(
            label="max(E rate)/$\\theta$ cycle (Hz)",
            location='left',
            shrink=0.8,
            pad=0.25,
            ticks=ti.MultipleLocator(100),
            #ticks = ti.LogLocator(base=4),
            #format = ti.LogFormatter(4),
            rasterized=True)
    }
    _default_config[
        'MaxMeanThetaFRSweepPlotter'] = MaxMeanThetaFRSweepPlotter_config

    ##########################################################################
    PSeizureSweepPlotter_config = {
        'FRThreshold': 300,
        'plot_grid_contours': [1, 1, 1],
        'grid_contours': [.5],
    }
    # The colorbar label embeds the threshold value, so it is added in a
    # second step once 'FRThreshold' is available.
    PSeizureSweepPlotter_config.update({
        'cbar_kw': dict(label="P($E-rate_{{max}}$ > {0})".format(
                            PSeizureSweepPlotter_config['FRThreshold']),
                        location='left',
                        shrink=0.8,
                        pad=0.25,
                        ticks=ti.MultipleLocator(0.5),
                        rasterized=True)
    })
    _default_config['PSeizureSweepPlotter'] = PSeizureSweepPlotter_config

    ##########################################################################
    MaxFRGridsProbabilityPlotter_config = {
        'fig_size': (2.7, 2.7),  # inches
        'scale_factor': .85,
        'bbox_rect': (0.3, 0.22, 0.92, 0.9),
    }
    _default_config[
        'MaxFRGridsProbabilityPlotter'] = MaxFRGridsProbabilityPlotter_config

    ##########################################################################
    PSeizureGridsProbabilityPlotter_config = {
        'FRThreshold': 300,
        'fig_size': (2.7, 2.7),  # inches
        'scale_factor': .85,
        'bbox_rect': (0.3, 0.22, 0.92, 0.9),
    }
    _default_config[
        'PSeizureGridsProbabilityPlotter'] = PSeizureGridsProbabilityPlotter_config

    ##########################################################################
    PSeizureGridsScatterAllPlotter_config = {
        'FRThreshold': 300,
        'fig_size': (2.5, 2.2),  # inches
        'bbox_rect': (0.3, 0.23, 0.92, 0.9),
        'tight_layout_kwargs': {
            'pad': .2,
        },
        'legend_kwargs': dict(loc=(0.5, 0.6),
                              fontsize='small',
                              frameon=False,
                              numpoints=1,
                              title='$\sigma$ (pA)'),
    }
    _default_config[
        'PSeizureGridsScatterAllPlotter'] = PSeizureGridsScatterAllPlotter_config

    ##########################################################################
    MaxFRGridsScatterAllPlotter_config = {
        'fig_size': (2.5, 2.2),  # inches
        'bbox_rect': (0.3, 0.23, 0.92, 0.9),
        'tight_layout_kwargs': {
            'pad': .2,
        },
        'plot_legend': False,
        'legend_kwargs': dict(loc=(0.6, 0.5),
                              fontsize='small',
                              frameon=False,
                              numpoints=1,
                              title='$\sigma$ (pA)'),
    }
    _default_config[
        'MaxFRGridsScatterAllPlotter'] = MaxFRGridsScatterAllPlotter_config

    ##########################################################################
    MaxStdThetaFRSweepPlotter_config = {
        'cbar_kw': dict(label="max(E rate)/$\\theta$ cycle (Hz)",
                        location='left',
                        shrink=0.8,
                        pad=0.25,
                        ticks=ti.MaxNLocator(4),
                        rasterized=True)
    }
    _default_config[
        'MaxStdThetaFRSweepPlotter'] = MaxStdThetaFRSweepPlotter_config

    ##########################################################################
    # Shares the settings object of the Std variant above.
    _default_config[
        'MaxMedianThetaFRSweepPlotter'] = MaxStdThetaFRSweepPlotter_config

    ##########################################################################
    VelSlopeSweepPlotter_config = {
        'scale_factor': .8,
        'vmin': -.472,
        'vmax': 1.353,
        'cbar': [0, 0, 1],
        'cbar_kw': dict(
            location='right',
            shrink=0.8,
            pad=-0.1,
            label='Slope\n(neurons/s/pA)',
            ticks=ti.MultipleLocator(0.4),
        ),
        'plot_contours': [1, 1, 1],
    }
    _default_config['VelSlopeSweepPlotter'] = VelSlopeSweepPlotter_config

    ##########################################################################
    VelFitErrSweepPlotter_config = {
        'scale_factor': .8,
        'cbar': [0, 0, 1],
        'cbar_kw': dict(label='Fit error (neurons/s)',
                        location='right',
                        shrink=0.8,
                        pad=-0.1,
                        ticks=ti.MultipleLocator(2),
                        rasterized=True),
        'ylabel': [None, '', ''],
        'yticks': [1, 0, 0],
        'plot_contours': [1, 1, 1],
        'vmin': 0,
        'vmax': 11.2,
    }
    _default_config['VelFitErrSweepPlotter'] = VelFitErrSweepPlotter_config

    ##########################################################################
    VelLinesPlotter_config = {
        'scale_factor': .8,
        'fig_size': (3., 2),
        'bbox_rect': (0.4, 0.35, 0.95, 0.65),
        'positions': ((5, 15), (5, 15), (5, 15)),
        'ivel_range': 11,
        'g_ann': False,
    }
    _default_config['VelLinesPlotter'] = VelLinesPlotter_config

    ##########################################################################
    VelFitStdSweepPlotter_config = {
        'scale_factor': .7,
        'cbar_kw': dict(location='right',
                        label='Mean $\sigma_{spd}$ (neurons/s)',
                        shrink=0.8,
                        pad=0.05,
                        ticks=ti.MultipleLocator(5),
                        extend='max',
                        extendfrac=0.1)
    }
    _default_config['VelFitStdSweepPlotter'] = VelFitStdSweepPlotter_config

    ##########################################################################
    VelocityRasterPlotter_config = {
        'fig_size': (3.75, 2.2),
        'transparent': True,
        'bbox': (0.2, 0.2, 0.99, 0.8)
    }
    _default_config['VelocityRasterPlotter'] = VelocityRasterPlotter_config

    ##########################################################################
    VelocityRatePlotter_config = {
        'fig_size': (3.75, 1),
        'bbox': (.2, .2, .99, 0.70),
        'transparent': True,
    }
    _default_config['VelocityRatePlotter'] = VelocityRatePlotter_config

    ##########################################################################
    VelocityRasterZoomPlotter_config = {
        'fig_size': (3.75 * .75, 1.2),
        'ylabelPos': -0.22,
        'bbox': (0.2, 0.25, 0.99, 0.95),
        'transparent': True,
    }
    _default_config[
        'VelocityRasterZoomPlotter'] = VelocityRasterZoomPlotter_config

    ##########################################################################
    ThetaSignalPlotter_config = {
        'fig_size': (3, .5),
        'T': .5e3,  # ms
        'bbox': (0, .05, 1., .95),  # l, b, r, t
        'color': (0, 0, 0, .3),
    }
    _default_config['ThetaSignalPlotter'] = ThetaSignalPlotter_config

    ##########################################################################
    PACExamplePlotter_config = {
        'fig_size': (5, 3.5),
        'bbox': (0, .05, 1., .95),  # l, b, r, t
        'letter_xy': (0, 1.),
        'theta_color': 'k',
        'gamma_color': 'b',
    }
    _default_config['PACExamplePlotter'] = PACExamplePlotter_config

    ##########################################################################
    RasterExamplePlotter_config = {
        'fig_size': (6.2, 8.3),
        'sweep_rect': (.12, .73, .45, .95),
        'cbar_kw': dict(label="Mean $E-rate_{max}^{\\theta}$ (Hz)",
                        location='right',
                        shrink=0.8,
                        pad=.05,
                        ticks=ti.MultipleLocator(250),
                        rasterized=True),
        'FRThreshold': 300.,
        'ylabelPos': -0.1,
        'markersize': 1.5,
        'plot_ann_txt': True,
        'theta_color': (0, 0, 0, .3),
        'fig_saver': SeparateMultipageSaver(None, 'pdf')
    }
    _default_config['RasterExamplePlotter'] = RasterExamplePlotter_config

    ##########################################################################
    ScatterGammaGridsSeparatePlotter_config = {
        'fig_size': (5., 6.7),
        #'bbox_rect': (0.12, 0.17, 0.98, 0.92),
    }
    _default_config[
        'ScatterGammaGridsSeparatePlotter'] = ScatterGammaGridsSeparatePlotter_config

    ##########################################################################
    ScatterGammaFGridsSeparatePlotter_config = {
        'fig_size': (5., 6.7),
        #'bbox_rect': (0.12, 0.17, 0.98, 0.92),
    }
    _default_config[
        'ScatterGammaFGridsSeparatePlotter'] = ScatterGammaFGridsSeparatePlotter_config

    ##########################################################################
    GridsPBumpsProbabilityPlotter_config = {
        'fig_size': (2.7, 2.7),  # inches
        'bbox_rect': (0.25, 0.2, 0.95, 0.9),
        'title_size': 'medium',
    }
    _default_config[
        'GridsPBumpsProbabilityPlotter'] = GridsPBumpsProbabilityPlotter_config

    ##########################################################################
    GridBumpScatterPlotter_config = {
        'fig_size': (8.27, 11.69),
        'color_box_width': .165
    }
    # The color box's right edge depends on its width, so it is added in a
    # second step.
    GridBumpScatterPlotter_config.update({
        'color_box_coords': {
            'left': 0.14,  # w = 0.165
            'bottom': .85,
            'right': .14 + GridBumpScatterPlotter_config['color_box_width'],
            'top': .95
        }
        #'bbox_rect': (0.12, 0.17, 0.98, 0.92),
    })
    _default_config['GridBumpScatterPlotter'] = GridBumpScatterPlotter_config

    ##########################################################################
    GridSimpleExamplePlotter_config = {
        'fig_size': (5.4, 2.5),
        'transparent': True,
        'ns_idx': 0,
        'rc': (25, 2),
        'trial_no': 0,
    }
    _default_config[
        'GridSimpleExamplePlotter'] = GridSimpleExamplePlotter_config

    ##########################################################################
    Burak2009ConnectionPlotter_config = {
        'fig_size': (2, 2),
    }
    _default_config[
        'Burak2009ConnectionPlotter'] = Burak2009ConnectionPlotter_config

    ##########################################################################
    FRSweepPlotter_config = {
        'scale_factor': .8,
        'cbar_kw': {
            'location': 'right',  # This has to match cbar_kw_e and cbar_kw_i
        },
        'plot_grid_contours': [1, 1, 1],
        'cbar_kw_e': {
            'label': 'Mean E Firing rate (Hz)',
            'location': 'right',
            'shrink': 0.8,
            'pad': -0.05,
            'ticks': ti.LogLocator(subs=[1, 2, 4, 6, 8]),
            'rasterized': True,
        },
        'cbar_kw_i': {
            'label': 'Mean I Firing rate (Hz)',
            'location': 'right',
            'shrink': 0.8,
            'pad': -0.05,
            'ticks': ti.LogLocator(subs=[1, 2, 4, 6, 8]),
            'rasterized': True,
        },
    }
    _default_config['FRSweepPlotter'] = FRSweepPlotter_config

    ##########################################################################
    ScatterGridsFRAllPlotter_config = {
        'fig_size': (4.2, 3),
        'dot_size': 6,
        'legend_kwargs': dict(loc=(0.4, 0.6),
                              fontsize='small',
                              frameon=False,
                              numpoints=1,
                              title='$\sigma$ (pA)'),
        'bbox_rect': (.2, .2, .95, .95),
        'ylabel': 'Gridness score',
        'yticks': True,
    }
    _default_config[
        'ScatterGridsFRAllPlotter'] = ScatterGridsFRAllPlotter_config

    ##########################################################################
    GridsVelFitErrProbabilityPlotter_config = {
        'fig_size': (2.7, 2.7),  # inches
        'bbox_rect': (0.25, 0.2, 0.95, 0.9),
        'title_size': 'medium',
        'data_range': [[0, 11.2], [-.5, 1.2]],
    }
    _default_config[
        'GridsVelFitErrProbabilityPlotter'] = GridsVelFitErrProbabilityPlotter_config

    ##########################################################################
    # The four weight plotters share identical settings.
    _default_config.update({
        'WeightOutE2IPlotter': {
            'fig_size': (1.75, 1.75),
            'g_idx': 15,
            'neuron_idx': 527,
            'use_title': False,
        },
        'WeightOutI2EPlotter': {
            'fig_size': (1.75, 1.75),
            'g_idx': 15,
            'neuron_idx': 527,
            'use_title': False,
        },
        'WeightInE2IPlotter': {
            'fig_size': (1.75, 1.75),
            'g_idx': 15,
            'neuron_idx': 527,
            'use_title': False,
        },
        'WeightInI2EPlotter': {
            'fig_size': (1.75, 1.75),
            'g_idx': 15,
            'neuron_idx': 527,
            'use_title': False,
        },
    })
    _default_config['WeightGridPlotter'] = {
        'fig_size': (3, 3),
        'cbar_fig_size': (1, 0.5),
        'bbox_rect': (.1, .1, .9, .9),
        'g_idx': 15,
        'neuron_idx': 527,
    }
    _default_config['GridExampleColorbarPlotter'] = {
        'fig_size': (0.6, 0.8),
    }
    _default_config['BumpExampleColorbarPlotter'] = {
        'fig_size': (0.6, 0.6),
    }

    ##########################################################################
    _default_config['HighGridScoreFraction'] = {
        'threshold': .5,
    }

    ##########################################################################
    return _default_config
def oggm_static_paths():
    """Initialise the OGGM paths from the config file.

    Reads (and, on first run, creates) the user config file at
    ``CONFIG_FILE``, validates that every expected entry is present,
    applies ``OGGM_*`` environment-variable overrides, and fills the
    module-level ``PATHS`` and ``PARAMS`` dicts.

    Raises
    ------
    RuntimeError
        If an expected key is missing from the config file, or if
        ``dl_cache_dir`` is empty.
    """
    global PATHS, PARAMS

    # See if the file is there, if not create it with sensible defaults
    # rooted in the user's home directory.
    if not os.path.exists(CONFIG_FILE):
        dldir = os.path.join(os.path.expanduser('~'), 'OGGM')
        config = ConfigObj()
        config['dl_cache_dir'] = os.path.join(dldir, 'download_cache')
        config['dl_cache_readonly'] = False
        config['tmp_dir'] = os.path.join(dldir, 'tmp')
        config['cru_dir'] = os.path.join(dldir, 'cru')
        config['rgi_dir'] = os.path.join(dldir, 'rgi')
        config['test_dir'] = os.path.join(dldir, 'tests')
        config['has_internet'] = True
        config.filename = CONFIG_FILE
        config.write()

    # OK, read in the file
    try:
        config = ConfigObj(CONFIG_FILE, file_error=True)
    except (ConfigObjError, IOError) as e:
        log.critical('Config file could not be parsed (%s): %s',
                     CONFIG_FILE, e)
        sys.exit()

    # Check that all expected keys are here
    for k in ['dl_cache_dir', 'dl_cache_readonly', 'tmp_dir', 'cru_dir',
              'rgi_dir', 'test_dir', 'has_internet']:
        if k not in config:
            raise RuntimeError('The oggm config file ({}) should have an '
                               'entry for {}.'.format(CONFIG_FILE, k))

    # Override defaults with env variables if available
    ro_env = os.environ.get('OGGM_DOWNLOAD_CACHE_RO')
    if ro_env is not None:
        config['dl_cache_readonly'] = bool(strtobool(ro_env))

    cache_env = os.environ.get('OGGM_DOWNLOAD_CACHE')
    if cache_env is not None:
        config['dl_cache_dir'] = cache_env

    extract_env = os.environ.get('OGGM_EXTRACT_DIR')
    if extract_env is not None:
        # This is for the directories where OGGM needs to extract things
        # On the cluster it might be useful to do it on a fast disc
        edir = os.path.abspath(extract_env)
        config['tmp_dir'] = os.path.join(edir, 'tmp')
        config['cru_dir'] = os.path.join(edir, 'cru')
        config['rgi_dir'] = os.path.join(edir, 'rgi')

    if not config['dl_cache_dir']:
        # BUG FIX: a stray second argument (the leftover loop variable `k`)
        # was previously passed to .format(); the message has one placeholder.
        raise RuntimeError('At the very least, the "dl_cache_dir" entry '
                           'should be provided in the oggm config file '
                           '({})'.format(CONFIG_FILE))

    # Fill the PATHS dict with the *_dir entries only.
    # Note: iteritems() is ConfigObj Section API (order-preserving), not the
    # Python 2 dict method.
    for k, v in config.iteritems():
        if not k.endswith('_dir'):
            continue
        PATHS[k] = os.path.abspath(os.path.expanduser(v))

    # Other (non-path) settings
    PARAMS['has_internet'] = config.as_bool('has_internet')
    PARAMS['dl_cache_readonly'] = config.as_bool('dl_cache_readonly')

    # Create cache dir if possible (not when the cache is read-only)
    if not os.path.exists(PATHS['dl_cache_dir']):
        if not PARAMS['dl_cache_readonly']:
            os.makedirs(PATHS['dl_cache_dir'])
class ResponderSettings(PumpkinModule):
    """Qt dialog for editing the Responder plugin's INI configuration.

    Loads the config file referenced by the 'plugins/responder_config'
    setting into a two-column (Config/Value) table and writes edited
    values back to disk via ConfigObj.
    """

    def __init__(self,parent=None):
        super(ResponderSettings, self).__init__(parent)
        self.setWindowTitle('Responder Plugin settings')
        self.setGeometry(0,0,480, 500)
        self.main = QVBoxLayout()
        # Column-oriented backing store for the table widget.
        self.THeaders = {'Config':[],'Value':[] }
        # Path to the Responder config file comes from the application settings.
        self.userConfig = ConfigObj(str(self.configure.Settings.get_setting('plugins','responder_config')))
        # Disable '%'-style interpolation so raw values survive round-trips.
        self.userConfig.interpolation = False
        self.loadtheme(self.configure.XmlThemeSelected())
        self.center()
        self.GUI()

    def addRowTableWidget(self, _key, _value):
        ''' add items into TableWidget '''
        Headers = []
        self.THeaders['Config'].append(_key)
        self.THeaders['Value'].append(_value)
        # Re-render every cell; n is the column index, m the row index.
        for n, key in enumerate(self.THeaders.keys()):
            Headers.append(key)
            for m, item in enumerate(self.THeaders[key]):
                item = QTableWidgetItem(item)
                item.setFlags(item.flags() | Qt.ItemIsEditable)
                self.TabSettings.setItem(m, n, item)
        self.TabSettings.resizeColumnToContents(0)

    def getAllRowTablesWidget(self):
        ''' dump all setting into table for list'''
        # Returns a flat [key1, value1, key2, value2, ...] list read back
        # from the table model.
        # NOTE(review): model.data(index).toString() implies the PyQt4
        # QVariant API (Python 2 era) — confirm before porting to PyQt5.
        model = self.TabSettings.model()
        data,datafilter = [],[]
        for row in range(model.rowCount()):
            data.append([])
            for column in range(model.columnCount()):
                index = model.index(row, column)
                data[row].append(str(model.data(index).toString()))
        for key,item in data:
            datafilter.append(key)
            datafilter.append(item)
        return datafilter

    def addAllconfigKeys(self):
        ''' get all settings and add into table'''
        # Flatten every section's key/value pairs into table rows.
        for key in self.userConfig.keys():
            for items in self.userConfig[key].items():
                self.addRowTableWidget(items[0],items[1])

    def checkConfigKeysResponder(self,saveObjct=False,count=False):
        ''' check number row and save settings '''
        if count:
            # Count all key/value pairs across sections (table row count).
            lenconfig = 0
            for key in self.userConfig.keys():
                for items in self.userConfig[key].items():
                    lenconfig += 1
            return lenconfig
        if saveObjct:
            # The flat list alternates key, value — the edited value sits
            # right after its key, hence index(key) + 1.
            settings = self.getAllRowTablesWidget()
            for key in self.userConfig.keys():
                for items in self.userConfig[key].items():
                    self.userConfig[key][items[0]] = settings[settings.index(items[0])+1]
            self.userConfig.write()

    def saveConfigObject(self):
        # Persist edits to disk, notify the user, then close the dialog.
        self.checkConfigKeysResponder(saveObjct=True)
        QMessageBox.information(self,'Responder settings','All settings in {} has been saved '
            'with success.'.format(str(self.configure.Settings.get_setting('plugins','responder_config'))))
        self.close()

    def GUI(self):
        # Build the table (one row per config entry), the save button, and
        # the surrounding group-box layout.
        self.TabSettings = QTableWidget(self.checkConfigKeysResponder(count=True),2)
        self.btnSave = QPushButton('Save settings')
        self.GroupBox = QGroupBox(self)
        self.widget = QWidget()
        self.layoutGroup = QVBoxLayout(self.widget)
        self.GroupBox.setLayout(self.layoutGroup)
        self.GroupBox.setTitle('Options')
        self.addAllconfigKeys()
        self.btnSave.clicked.connect(self.saveConfigObject)
        self.TabSettings.resizeRowsToContents()
        self.TabSettings.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
        self.TabSettings.horizontalHeader().setStretchLastSection(True)
        self.TabSettings.setSelectionBehavior(QAbstractItemView.SelectRows)
        #self.TabSettings.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.TabSettings.verticalHeader().setVisible(False)
        self.TabSettings.setHorizontalHeaderLabels(self.THeaders.keys())
        self.TabSettings.verticalHeader().setDefaultSectionSize(23)
        self.layout = QVBoxLayout(self.widget)
        self.layoutGroup.addWidget(self.TabSettings)
        self.layout.addWidget(self.GroupBox)
        self.layout.addWidget(self.btnSave)
        self.main.addWidget(self.widget)
        self.setLayout(self.main)
def base_config(config_path: Path, config=None):
    """Load and validate the stream configuration.

    Builds a ConfigObj configspec (global camera parameters plus
    per-camera overrides), creates a default config file at *config_path*
    if none exists, then parses either the *config* string (if given) or
    the file and validates it, including a live check of the ParkPow
    webhook token.

    :param config_path: path of the config file on disk.
    :param config: optional raw config text; when given it is parsed
        instead of the file at *config_path*.
    :return: ``(config, None)`` on success, ``(None, error_message)`` on
        parse or validation failure.
    """
    # Spec entries shared by all cameras (defaults live here).
    global_params = dict(
        regions='force_list(default=list())',
        webhook_target='string(default="")',
        webhook_header='header(default="", url=webhook_target)',
        webhook_image='boolean(default=yes)',
        webhook_image_type='option("vehicle", "original", default="vehicle")',
        max_prediction_delay='float(default=6)',
        memory_decay='float(default=300)',
        image_format=
        'string(default="$(camera)_screenshots/%y-%m-%d/%H-%M-%S.%f.jpg")',
        sample='integer(default=2)',
        total='integer(default=-1)',
        mmc='boolean(default=no)',
        csv_file='string(default="")',
        jsonlines_file='string(default="")',
    )
    # Per-camera spec; default=None means "inherit the global value".
    camera = dict(
        url='string',
        name='string',
        active='boolean(default=yes)',
        # Overridable
        regions='force_list(default=None)',
        webhook_target='string(default=None)',
        webhook_header='header(default=None, url=webhook_target)',
        webhook_image='boolean(default=None)',
        webhook_image_type='option("vehicle", "original", default=None)',
        max_prediction_delay='float(default=None)',
        memory_decay='float(default=None)',
        image_format='string(default=None)',
        sample='integer(default=None)',
        total='integer(default=None)',
        mmc='boolean(default=None)',
        csv_file='string(default=None)',
        jsonlines_file='string(default=None)',
    )

    def webhook_header_check(value, *args, **kwargs):
        # Custom Validator check for the 'header' spec type: verifies the
        # ParkPow API token by performing a live request.
        token = value.split('Token ')[-1]
        if not token:
            return None
        url = 'https://app.parkpow.com/api/v1/parking-list'
        headers = {'Authorization': f'Token {token}'}
        try:
            response = requests.get(url, headers=headers, timeout=10)
        except (requests.Timeout, requests.ConnectionError):
            raise ValidateError('Please check your internet connection.')
        if response.status_code != 200:
            raise ValidateError('Wrong token.')
        return value

    spec = ConfigObj()
    spec['timezone'] = 'string(default="UTC")'
    spec['version'] = 'integer(default=2)'
    spec['cameras'] = dict(__many__=camera, **global_params)

    # First run: materialise the default config with Windows line endings.
    if not config_path.exists():
        with open(config_path, 'w') as fp:
            fp.write(DEFAULT_CONFIG.replace('\n', '\r\n'))
    try:
        # Parse the raw string when provided, otherwise the file on disk.
        config = ConfigObj(config.split('\n') if config else str(config_path),
                           configspec=spec,
                           raise_errors=True,
                           indent_type='  ')
        config.newlines = '\r\n'  # For Windows
    except Exception as e:
        logging.error(e)
        return None, str(e)

    validator = Validator({'header': webhook_header_check})
    result = config.validate(validator, preserve_errors=True)
    # flatten_errors yields (section_path, key, error) triples; error is
    # False for a missing key, or a ValidateError instance otherwise.
    errors = flatten_errors(config, result)
    if errors:
        error_message = 'Config errors:'
        for section_list, key, error in errors:
            if error is False:
                error = 'key %s is missing.' % key
            elif key is not None:
                section_list.append(key)
            section_string = '/'.join(section_list)
            logging.error('%s: %s', section_string, error)
            error = f'{section_string}, param: {key}, message: {error}'
            error_message += f'\n{error}'
        return None, error_message
    return config, None
sickbeard.FLATTEN_FOLDERS_DEFAULT = 0 sickbeard.NAMING_PATTERN = '' sickbeard.NAMING_ABD_PATTERN = '' sickbeard.NAMING_SPORTS_PATTERN = '' sickbeard.NAMING_MULTI_EP = 1 sickbeard.PROVIDER_ORDER = ["sick_beard_index"] sickbeard.newznabProviderList = providers.getNewznabProviderList("'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/||5030,5040,5060,5070,5090|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0|eponly|0|0|0'") sickbeard.providerList = providers.makeProviderList() sickbeard.PROG_DIR = os.path.abspath(os.path.join(TESTDIR, '..')) sickbeard.DATA_DIR = TESTDIR sickbeard.CONFIG_FILE = os.path.join(sickbeard.DATA_DIR, "config.ini") sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE) sickbeard.BRANCG = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'branch', '') sickbeard.CUR_COMMIT_HASH = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'cur_commit_hash', '') sickbeard.GIT_USERNAME = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'git_username', '') sickbeard.GIT_PASSWORD = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'git_password', '', censor_log=True) sickbeard.LOG_DIR = os.path.join(TESTDIR, 'Logs') sickbeard.logger.logFile = os.path.join(sickbeard.LOG_DIR, 'test_sickbeard.log') createTestLogFolder() sickbeard.CACHE_DIR = os.path.join(TESTDIR, 'cache') createTestCacheFolder() sickbeard.logger.initLogging(False, True)
class BDFProxySettings(PumpkinModule):
    """Qt dialog for editing the BDFProxy-ng plugin's configuration.

    Shows the 'targets' -> 'ALL' section of the bdfproxy config file in a
    two-column table (plain keys plus per-architecture sub-sections) and
    writes edits back to disk via ConfigObj.
    """

    def __init__(self,parent=None):
        super(BDFProxySettings, self).__init__(parent)
        # NOTE(review): window title says 'DBFProxy-ng' — likely a typo for
        # 'BDFProxy-ng', but it is a runtime string so it is left unchanged.
        self.setWindowTitle('DBFProxy-ng Plugin settings')
        self.setGeometry(0,0,480, 500)
        self.main = QVBoxLayout()
        # Column-oriented backing store for the table widget.
        self.THeaders = {'Config':[],'Value':[] }
        self.userConfig = ConfigObj(str(self.configure.Settings.get_setting('plugins','bdfproxy_config')))
        # Disable '%'-style interpolation so raw values survive round-trips.
        self.userConfig.interpolation = False
        self.loadtheme(self.configure.XmlThemeSelected())
        self.center()
        self.GUI()

    def addRowTableWidget(self, _key, _value):
        ''' add items into TableWidget '''
        Headers = []
        self.THeaders['Config'].append(_key)
        self.THeaders['Value'].append(_value)
        # Re-render every cell; n is the column index, m the row index.
        for n, key in enumerate(self.THeaders.keys()):
            Headers.append(key)
            for m, item in enumerate(self.THeaders[key]):
                item = QTableWidgetItem(item)
                item.setFlags(item.flags() | Qt.ItemIsEditable)
                self.TabSettings.setItem(m, n, item)
        self.TabSettings.resizeColumnToContents(0)

    def getAllRowTablesWidget(self):
        ''' dump all settings from table '''
        # Rebuild a nested dict from the flat table: the first 5 rows are
        # the scalar 'ESP' options, the rest belong to whichever
        # architecture sub-section header was seen last (tracked in
        # self.key).
        # NOTE(review): model.data(index).toString() implies the PyQt4
        # QVariant API (Python 2 era) — confirm before porting to PyQt5.
        model = self.TabSettings.model()
        data,datafilter,self.key = [],OrderedDict(),None
        for row in range(model.rowCount()):
            data.append([])
            for column in range(model.columnCount()):
                index = model.index(row, column)
                data[row].append(str(model.data(index).toString()))
        datafilter['ESP'] = {}
        datafilter['LinuxIntelx86'] = {}
        datafilter['LinuxIntelx64'] = {}
        datafilter['WindowsIntelx86'] = {}
        datafilter['WindowsIntelx64'] = {}
        datafilter['MachoIntelx86'] = {}
        datafilter['MachoIntelx64'] = {}
        for count,item in enumerate(data):
            if count < 5:
                if item[0] != '' or item[1] != '':
                    datafilter['ESP'][item[0]] = item[1]
            else:
                if item[0] != '' or item[1] != '':
                    if item[1] in datafilter.keys():
                        # Section-header row: switch the active sub-section.
                        self.key = item[1]
                    else:
                        datafilter[self.key][item[0]] = item[1]
        return datafilter

    def saveConfigObject(self):
        # Persist edits to disk, notify the user, then close the dialog.
        self.checkConfigKeysBDFProxy(saveObjct=True)
        QMessageBox.information(self,'BDFProxy-ng settings','All settings in {} has been saved '
            'with success.'.format(str(self.configure.Settings.get_setting('plugins','bdfproxy_config'))))
        self.close()

    def checkConfigKeysBDFProxy(self,saveObjct=False):
        ''' save all change into file.conf '''
        # Dual-purpose walker over targets/ALL: with saveObjct=True it
        # copies edited table values back into the config; otherwise it
        # populates the table (scalar keys directly, Section values as a
        # '--->' header row followed by their keys).
        if saveObjct:
            changedData = self.getAllRowTablesWidget()
        for target in self.userConfig['targets'].keys():
            if target == 'ALL':
                for item in self.userConfig['targets']['ALL']:
                    if type(self.userConfig['targets']['ALL'][item]) == str:
                        if saveObjct:
                            self.userConfig['targets']['ALL'][item] = changedData['ESP'][item]
                        else:
                            self.addRowTableWidget(item,self.userConfig['targets']['ALL'][item])
                    elif type(self.userConfig['targets']['ALL'][item]) == Section:
                        if saveObjct:
                            self.userConfig['targets']['ALL'][item] = changedData[item]
                        else:
                            self.addRowTableWidget('-'*35+'>',item)
                            for key in self.userConfig['targets']['ALL'][item]:
                                self.addRowTableWidget(key,self.userConfig['targets']['ALL'][item][key])
        if saveObjct:
            self.userConfig.write()

    def GUI(self):
        # Build a fixed 50-row table, the save button, and the surrounding
        # group-box layout.
        self.TabSettings = QTableWidget(50,2)
        self.btnSave = QPushButton('Save settings')
        self.GroupBox = QGroupBox(self)
        self.widget = QWidget()
        self.layoutGroup = QVBoxLayout(self.widget)
        self.GroupBox.setLayout(self.layoutGroup)
        self.GroupBox.setTitle('Options')
        self.checkConfigKeysBDFProxy()
        self.btnSave.clicked.connect(self.saveConfigObject)
        self.TabSettings.resizeRowsToContents()
        self.TabSettings.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
        self.TabSettings.horizontalHeader().setStretchLastSection(True)
        self.TabSettings.setSelectionBehavior(QAbstractItemView.SelectRows)
        #self.TabSettings.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.TabSettings.verticalHeader().setVisible(False)
        self.TabSettings.setHorizontalHeaderLabels(self.THeaders.keys())
        self.TabSettings.verticalHeader().setDefaultSectionSize(23)
        self.layout = QVBoxLayout(self.widget)
        self.layoutGroup.addWidget(self.TabSettings)
        self.layout.addWidget(self.GroupBox)
        self.layout.addWidget(self.btnSave)
        self.main.addWidget(self.widget)
        self.setLayout(self.main)
class Config(object):
    """
    Wraps access to particular values in a config file.

    Every ALL-CAPS attribute access is transparently mapped to an entry of
    ``_CONFIG_DEFINITIONS`` (see ``__getattr__``/``__setattr__``), so
    callers can read and write settings as plain attributes.
    """

    def __init__(self, config_file):
        """ Initialize the config with values from a file """
        self._config_file = config_file
        self._config = ConfigObj(self._config_file, encoding='utf-8')
        # Force every known setting through the type/default check once so
        # the backing config only contains correctly-typed values.
        for key in _CONFIG_DEFINITIONS.keys():
            self.check_setting(key)
        # A negative multicore count makes no sense; clamp to >= 0.
        self.ENCODER_MULTICORE_COUNT = max(0, self.ENCODER_MULTICORE_COUNT)
        self._upgrade()

    def _define(self, name):
        """ Resolve a setting name to (KEY, type, section, ini_key, default). """
        key = name.upper()
        ini_key = name.lower()
        definition = _CONFIG_DEFINITIONS[key]
        # Definitions come as 3- or 4-tuples; the default is always last.
        if len(definition) == 3:
            definition_type, section, default = definition
        else:
            definition_type, section, _, default = definition
        return key, definition_type, section, ini_key, default

    def check_section(self, section):
        """ Check if INI section exists, if not create it """
        if section not in self._config:
            self._config[section] = {}
            return True
        else:
            return False

    def check_setting(self, key):
        """ Cast any value in the config to the right type or use the default """
        key, definition_type, section, ini_key, default = self._define(key)
        self.check_section(section)
        try:
            my_val = definition_type(self._config[section][ini_key])
        except Exception:
            # Missing key or uncastable value: fall back to the typed
            # default and write it back so the config is self-healing.
            my_val = definition_type(default)
            self._config[section][ini_key] = my_val
        return my_val

    def write(self):
        """ Make a copy of the stored config and write it to the configured file """
        new_config = ConfigObj(encoding="UTF-8")
        new_config.filename = self._config_file

        # first copy over everything from the old config, even if it is not
        # correctly defined to keep from losing data
        for key, subkeys in self._config.items():
            if key not in new_config:
                new_config[key] = {}
            for subkey, value in subkeys.items():
                new_config[key][subkey] = value

        # next make sure that everything we expect to have defined is so
        for key in _CONFIG_DEFINITIONS.keys():
            key, definition_type, section, ini_key, default = self._define(key)
            self.check_setting(key)
            if section not in new_config:
                new_config[section] = {}
            new_config[section][ini_key] = self._config[section][ini_key]

        # Write it to file
        headphones.logger.info("Writing configuration to file")
        try:
            new_config.write()
        except IOError as e:
            headphones.logger.error("Error writing configuration file: %s", e)

    def get_extra_newznabs(self):
        """ Return the extra newznab tuples """
        # EXTRA_NEWZNABS is stored flat as [name, url, enabled, ...];
        # regroup it into (name, url, enabled) triples.
        extra_newznabs = list(
            itertools.izip(*[
                itertools.islice(self.EXTRA_NEWZNABS, i, None, 3)
                for i in range(3)
            ]))
        return extra_newznabs

    def clear_extra_newznabs(self):
        """ Forget about the configured extra newznabs """
        self.EXTRA_NEWZNABS = []

    def add_extra_newznab(self, newznab):
        """ Add a new extra newznab """
        extra_newznabs = self.EXTRA_NEWZNABS
        for item in newznab:
            extra_newznabs.append(item)
        self.EXTRA_NEWZNABS = extra_newznabs

    def __getattr__(self, name):
        """
        Returns something from the ini unless it is a real property
        of the configuration object or is not all caps.
        """
        if not re.match(r'[A-Z_]+$', name):
            return super(Config, self).__getattr__(name)
        else:
            return self.check_setting(name)

    def __setattr__(self, name, value):
        """
        Maps all-caps properties to ini values unless they exist on the
        configuration object.
        """
        if not re.match(r'[A-Z_]+$', name):
            super(Config, self).__setattr__(name, value)
            return value
        else:
            key, definition_type, section, ini_key, default = self._define(
                name)
            self._config[section][ini_key] = definition_type(value)
            return self._config[section][ini_key]

    def process_kwargs(self, kwargs):
        """
        Given a big bunch of key value pairs, apply them to the ini.
        """
        for name, value in kwargs.items():
            key, definition_type, section, ini_key, default = self._define(
                name)
            self._config[section][ini_key] = definition_type(value)

    def _upgrade(self):
        """ Update folder formats in the config & bump up config version """
        if self.CONFIG_VERSION == '0':
            from headphones.helpers import replace_all
            file_values = {
                'tracknumber': 'Track',
                'title': 'Title',
                'artist': 'Artist',
                'album': 'Album',
                'year': 'Year'
            }
            folder_values = {
                'artist': 'Artist',
                'album': 'Album',
                'year': 'Year',
                'releasetype': 'Type',
                'first': 'First',
                'lowerfirst': 'first'
            }
            self.FILE_FORMAT = replace_all(self.FILE_FORMAT, file_values)
            self.FOLDER_FORMAT = replace_all(self.FOLDER_FORMAT,
                                             folder_values)
            self.CONFIG_VERSION = '1'

        if self.CONFIG_VERSION == '1':
            from headphones.helpers import replace_all
            file_values = {
                'Track': '$Track',
                'Title': '$Title',
                'Artist': '$Artist',
                'Album': '$Album',
                'Year': '$Year',
                'track': '$track',
                'title': '$title',
                'artist': '$artist',
                'album': '$album',
                'year': '$year'
            }
            folder_values = {
                'Artist': '$Artist',
                'Album': '$Album',
                'Year': '$Year',
                'Type': '$Type',
                'First': '$First',
                'artist': '$artist',
                'album': '$album',
                'year': '$year',
                'type': '$type',
                'first': '$first'
            }
            self.FILE_FORMAT = replace_all(self.FILE_FORMAT, file_values)
            self.FOLDER_FORMAT = replace_all(self.FOLDER_FORMAT,
                                             folder_values)
            self.CONFIG_VERSION = '2'

        if self.CONFIG_VERSION == '2':
            # Update the config to use direct path to the encoder rather than the encoder folder
            if self.ENCODERFOLDER:
                self.ENCODER_PATH = os.path.join(self.ENCODERFOLDER,
                                                 self.ENCODER)
            self.CONFIG_VERSION = '3'

        if self.CONFIG_VERSION == '3':
            # Update the BLACKHOLE option to the NZB_DOWNLOADER format
            if self.BLACKHOLE:
                self.NZB_DOWNLOADER = 2
            self.CONFIG_VERSION = '4'

        # Enable Headphones Indexer if they have a VIP account
        if self.CONFIG_VERSION == '4':
            if self.HPUSER and self.HPPASS:
                self.HEADPHONES_INDEXER = True
            self.CONFIG_VERSION = '5'

        if self.CONFIG_VERSION == '5':
            if self.OPEN_MAGNET_LINKS:
                self.MAGNET_LINKS = 2
            # BUG FIX: this step previously re-assigned CONFIG_VERSION = '5',
            # so the v5 migration was never marked complete and re-ran on
            # every startup. Bump to '6' like every other upgrade step.
            self.CONFIG_VERSION = '6'
# Minimal tests RUN_SLOW_TESTS = False # quick n dirty method to see if internet is on try: _ = urlopen('http://www.google.com', timeout=1) HAS_INTERNET = True except URLError: HAS_INTERNET = False # check if there is a credentials file (should be added to .gitignore) cred_path = os.path.abspath(os.path.join(__file__, "../../..", '.credentials')) if os.path.exists(cred_path): HAS_CREDENTIALS = True try: cred = ConfigObj(cred_path) except ConfigObjError: raise else: HAS_CREDENTIALS = False cred = None def requires_internet(test): # Test decorator msg = 'requires internet' return test if HAS_INTERNET else unittest.skip(msg)(test) def requires_py3(test): # Test decorator
import logging from bot.bot import OSMbot from flask import Flask, request, current_app from bot import Osmbot from configobj import ConfigObj import os from raven.contrib.flask import Sentry application = Flask(__name__) application.debug = True Osmbot(application, '') config = ConfigObj('bot.conf') token = config['token'] bot = OSMbot(token) if 'sentry_dsn' in config: application.config['sentry_dsn'] = config['sentry_dsn'] sentry = Sentry(application, dsn=config['sentry_dsn']) sentry.captureMessage('OSMBot started', level=logging.INFO) application.sentry = sentry f = open('nginx.crt', 'r') cert_data = f.read() f.close() webhook = os.path.join(config['webhook'], config['token']) application.logger.debug('webhook:%s', config['webhook']) response = bot.setWebhook(webhook, cert_data) application.logger.debug('response:%s', response)
str(speck.ycentroid))) ax2.axvline(speck.null_phase) print speck.null_phase plt.draw() plt.pause(0.1) plt.cla() plt.close() if __name__ == "__main__": #configfilename = 'speckle_null_config.ini' #config = ConfigObj(configfilename) configfilename = 'speckle_null_config.ini' hardwareconfigfile = 'speckle_instruments.ini' configspecfile = 'speckle_null_config.spec' config = ConfigObj(configfilename, configspec=configspecfile) val = Validator() check = config.validate(val) pharo = hardware.PHARO_COM('PHARO', configfile = hardwareconfigfile) p3k = hardware.P3K_COM('P3K_COM', configfile = hardwareconfigfile) im_params= config['IM_PARAMS'] null_params = config['NULLING'] abc = config['INTENSITY_CAL']['abc'] bgds = flh.setup_bgd_dict(config) controlregion = pf.open(config['CONTROLREGION']['filename'])[0].data #Notes==>scale exptime in snr exp = config['INTENSITY_CAL']['exptime'] #Setup initial_flatmap = p3k.grab_current_flatmap()
def save(self, dirname=""): """ Saves the project as a directory named **dirname**. If **dirname** is not provided, the **project_save_path** attribute should already be set. If **project_save_path** is not set, then it will be set to **dirname**. """ if dirname == "": dirname = self.project_save_path else: self.project_save_path = dirname # Do some error checking if dirname == "": raise IOError("Cannot save project to empty path.") elif os.path.isfile(dirname): raise IOError('Cannot save project to directory "%s"; ' 'file exists.' % dirname) logger.info('Starting save of project to "%s"' % dirname) if not os.path.isdir(dirname): logger.info(' Creating directory "%s"' % dirname) os.mkdir(dirname) # TODO: We should formalize this dependency at some point and move # this import to the top level from configobj import ConfigObj config = ConfigObj() config["Project"] = {} exp = {} for i, experiment in enumerate(self.experiments): if experiment.name is not None and experiment.name != "": safename = self._encode_name(experiment.name) else: safename = "Experiment_%d" % i exp[safename] = experiment.save(basename=dirname, dirname=safename) logger.info(' Saved experiment "%s" to subdir "%s"' % \ (experiment.name, safename)) config["Experiments"] = exp if self.active_experiment is not None: config["Project"]["active_experiment"] = self.active_experiment.name contexts = {} for i, ctx in enumerate(self.contexts): if ctx.name is not None and ctx.name != "": safename = self._encode_name(ctx.name) else: safename = "Context_%d" % i filename = safename + ".pickle" contexts[safename] = dict(name=ctx.name, file=filename) ctx.save(join(dirname, filename)) logger.info(' Saved context "%s" to file "%s"' % \ (ctx.name, filename)) config["Contexts"] = contexts config.filename = join(dirname, self.PROJECT_FILE_NAME) logger.info(' Writing project to "%s"' % config.filename) config.write() logger.info('Finished saving project to "%s"' % dirname) return
import subprocess import glob import shutil import xml.etree.ElementTree as ET from configobj import ConfigObj # load config file CONFIG_FILE = os.path.realpath( os.path.join( os.path.dirname( os.path.dirname( __file__) ), "TrovaWrapper_config.ini" ) ) if not os.path.exists( CONFIG_FILE ) : msg = str('The TrovaWrapper config.ini file "' + CONFIG_FILE + '" is missing. Analysis cannot proceed...') #LogAndEmail( msg, 4) logging.critical( msg ) sys.exit() #print "CONFIG_FILE: " + CONFIG_FILE + "\n" #Config = ConfigParser.ConfigParser() Config = ConfigObj(CONFIG_FILE, raise_errors=True) # load global section #for key, value in Config.iteritems() : # print "KEY: " + str(key) + " VALUE: " + str(value) + "\n" # if 'global' in str(key): globalOptions = Config['global'] UNIFLOW_URL = globalOptions['UNIFLOW_URL'] TROVAEMON_ID = globalOptions['TROVAEMON_ID'] TROVAEMON_PWD = globalOptions['TROVAEMON_PWD'] MAX_RETRY = globalOptions['MAX_RETRY'] ''' Returns a list of paths to MiSeq repositories ''' def Repositories() : # get repo info