# Script: logs into a web service using credentials/endpoints from config.ini.
# The authenticated requests.Session (`client`) is reused by helpers below.
import requests
from bs4 import BeautifulSoup
import operator
from configobj import ConfigObj

# Runtime configuration: profile info, endpoint URLs and credentials.
config = ConfigObj("config.ini")
my_mini_profile = config["my_mini_profile"]
HOMEPAGE_URL = config["HOMEPAGE_URL"]
LOGIN_URL = config["LOGIN_URL"]
username = config["username"]
password = config["password"]

# Session keeps cookies between the CSRF-fetching GET and the login POST.
client = requests.Session()
html = client.get(HOMEPAGE_URL).content
soup = BeautifulSoup(html, "html5lib")
# The login form embeds a CSRF token that must be echoed back in the POST.
csrf = soup.find(id="loginCsrfParam-login")['value']
login_information = {
    'session_key': username,
    'session_password': password,
    'loginCsrfParam': csrf,
}
client.post(LOGIN_URL, data=login_information)


def getProfileIdentifiers(company, cutoff=20):
    # NOTE(review): the rest of this function's body is not visible in this
    # chunk — only the accumulator initialisation appears here.
    profile_identifiers = set()
def test_no_parent(tmpdir, specpath):
    """A doubly-nested section ('[[...]]') without a parent must raise NestingError."""
    bad_ini = tmpdir.join('config.ini')
    bad_ini.write('[[haha]]')
    with pytest.raises(NestingError):
        ConfigObj(str(bad_ini), configspec=specpath, file_error=True)
"""Load and type-coerce the model's runtime parameters from runtime_params.ini."""
import os
import shutil
import sys
from datetime import datetime  # FIX: datetime.now() below raised NameError without this
from configobj import ConfigObj
import json
from distutils.util import strtobool

#path_model = './'
#path_model = sys.argv[1]
#print (sys.argv)
#os.chdir(path_model)

startTime = datetime.now()

# get runtime params from config file
config = ConfigObj('runtime_params.ini')
# ConfigObj yields everything as strings, so each value is coerced explicitly;
# booleans go through strtobool to accept "yes"/"no"/"1"/"0" style spellings.
parallel_mode = bool(strtobool(config['parallel_mode']))
model_mode = config['model_mode']
short_test = int(config['short_test'])
print_log = bool(strtobool(config['print_log']))
seed = int(config['seed'])
# scenarios provide information on infrastructural plans
scenario_name = config['scenario_name']
flow_input_type = config['flow_input_type']
flow_input_source = config['flow_input_source']
total_sensitivity_factors = int(config['total_sensitivity_factors'])
sensitivity_sample_file = config['sensitivity_sample_file']
output_list = config['output_list']
output_directory = config['output_directory']
clean_output = bool(strtobool(config['clean_output']))
save_full = bool(strtobool(config['save_full']))
def get_config(config_path=None):
    """reads the config file, validates it and return a config dict

    :param config_path: path to a custom config file, if none is given the
        default locations will be searched
    :type config_path: str
    :returns: configuration
    :rtype: dict
    """
    if config_path is None:
        config_path = _find_configuration_file()
    logger.debug('using the config file at {}'.format(config_path))
    # Parse against the bundled configspec; interpolation is disabled so
    # literal '%' etc. in user values survive; file_error makes a missing
    # file raise instead of silently producing an empty config.
    try:
        user_config = ConfigObj(config_path,
                                configspec=SPECPATH,
                                interpolation=False,
                                file_error=True,
                                )
    except ConfigObjError as error:
        logger.fatal('parsing the config file file with the following error: '
                     '{}'.format(error))
        logger.fatal('if you recently updated khal, the config file format '
                     'might have changed, in that case please consult the '
                     'CHANGELOG or other documentation')
        raise CannotParseConfigFileError()
    # Custom validator checks referenced from the configspec.
    fdict = {
        'timezone': is_timezone,
        'expand_path': expand_path,
        'expand_db_path': expand_db_path,
        'weeknumbers': weeknumber_option,
        'color': is_color,
    }
    validator = Validator(fdict)
    results = user_config.validate(validator, preserve_errors=True)
    # Log every validation failure before aborting, so the user sees all
    # problems in one run instead of fixing them one at a time.
    abort = False
    for section, subsection, error in flatten_errors(user_config, results):
        abort = True
        if isinstance(error, Exception):
            logger.fatal('config error:\n'
                         'in [{}] {}: {}'.format(section[0], subsection, error))
        else:
            for key in error:
                if isinstance(error[key], Exception):
                    logger.fatal('config error:\nin {} {}: {}'.format(
                        sectionize(section + [subsection]),
                        key,
                        str(error[key])))
    if abort or not results:
        raise InvalidSettingsError()
    config_checks(user_config)
    # Warn (but do not fail) on keys/sections the configspec does not know.
    extras = get_extra_values(user_config)
    for section, value in extras:
        if section == ():
            logger.warn('unknown section "{}" in config file'.format(value))
        else:
            section = sectionize(section)
            logger.warn('unknown key or subsection "{}" in '
                        'section "{}"'.format(value, section))
    return user_config
def conf(inipath, specpath):
    """Return a ConfigObj built from *inipath*, validated against *specpath*."""
    parsed = ConfigObj(inipath, configspec=specpath)
    return parsed
def reloadConfig(self):
    """Re-read ./config/mitmf.conf into self.config; failures are logged and swallowed."""
    try:
        fresh = ConfigObj("./config/mitmf.conf")
    except Exception as e:
        # Best-effort reload: keep the previous config on any parse/IO error.
        mitmf_logger.error("Error reloading config file: {}".format(e))
    else:
        self.config = fresh
# Application configuration layer built on ConfigObj + validate.
from logHandler import logger
logging = logger.getChild('core.config')
import os
import confspecs
import paths
import application
from UserDict import UserDict  # Python 2 mapping base class
from configobj import ConfigObj, ParseError
from validate import Validator, VdtValueError

# Per-user config file lives in the application's data directory.
configFile = paths.data_path(application.name + ".ini")
# Specification (defaults and constraints) the user config is validated against.
confspec = ConfigObj(confspecs.defaults, list_values=False, encoding="UTF-8")
confspec.newlines = "\r\n"
# Module-level singleton, populated elsewhere.
conf = None


class ConfigurationResetException(Exception):
    pass


class Configuration(UserDict):
    # NOTE(review): class body is truncated in this chunk; only __init__ is visible.

    def __init__(self, file=None, spec=None, *args, **kwargs):
        # Load the config from `file`, validate it against `spec`, and — when
        # validation succeeds — write the (defaults-filled) result back out.
        self.file = file
        self.spec = spec
        self.validator = Validator()
        self.setup_config(file=file, spec=spec)
        # copy=True makes validate() fill in default values from the spec.
        self.validated = self.config.validate(self.validator, copy=True)
        if self.validated:
            self.write()
        UserDict.__init__(self, self.config)
speechManager = boolean(default=false) synthDriver = boolean(default=false) nvwave = boolean(default=false) annotations = boolean(default=false) [uwpOcr] language = string(default="") [upgrade] newLaptopKeyboardLayout = boolean(default=false) [editableText] caretMoveTimeoutMs = integer(min=0, max=2000, default=100) [development] enableScratchpadDir = boolean(default=false) [featureFlag] # 0:default, 1:yes, 2:no cancelExpiredFocusSpeech = integer(0, 2, default=0) # 0:Only in test versions, 1:yes playErrorSound = integer(0, 1, default=0) """ #: The configuration specification #: @type: ConfigObj confspec = ConfigObj(StringIO(configSpecString), list_values=False, encoding="UTF-8") confspec.newlines = "\r\n"
str(speck.ycentroid))) ax2.axvline(speck.null_phase) print speck.null_phase plt.draw() plt.pause(0.1) plt.cla() plt.close() if __name__ == "__main__": #configfilename = 'speckle_null_config.ini' #config = ConfigObj(configfilename) configfilename = 'speckle_null_config.ini' hardwareconfigfile = 'speckle_instruments.ini' configspecfile = 'speckle_null_config.spec' config = ConfigObj(configfilename, configspec=configspecfile) val = Validator() check = config.validate(val) pharo = hardware.PHARO_COM('PHARO', configfile = hardwareconfigfile) p3k = hardware.P3K_COM('P3K_COM', configfile = hardwareconfigfile) im_params= config['IM_PARAMS'] null_params = config['NULLING'] abc = config['INTENSITY_CAL']['abc'] bgds = flh.setup_bgd_dict(config) controlregion = pf.open(config['CONTROLREGION']['filename'])[0].data #Notes==>scale exptime in snr exp = config['INTENSITY_CAL']['exptime'] #Setup initial_flatmap = p3k.grab_current_flatmap()
"""OSMbot startup: build the Flask app, wire optional Sentry, register the webhook."""
import logging
from bot.bot import OSMbot
from flask import Flask, request, current_app
from bot import Osmbot
from configobj import ConfigObj
import os
from raven.contrib.flask import Sentry

application = Flask(__name__)
application.debug = True
Osmbot(application, '')

config = ConfigObj('bot.conf')
token = config['token']
bot = OSMbot(token)

# Sentry error reporting is optional — only wired when a DSN is configured.
if 'sentry_dsn' in config:
    application.config['sentry_dsn'] = config['sentry_dsn']
    sentry = Sentry(application, dsn=config['sentry_dsn'])
    sentry.captureMessage('OSMBot started', level=logging.INFO)
    application.sentry = sentry

# FIX: use a context manager so the certificate file handle is always closed.
with open('nginx.crt', 'r') as cert_file:
    cert_data = cert_file.read()

# Webhook URL embeds the bot token; the self-signed cert is uploaded with it.
webhook = os.path.join(config['webhook'], config['token'])
application.logger.debug('webhook:%s', config['webhook'])
response = bot.setWebhook(webhook, cert_data)
application.logger.debug('response:%s', response)
import subprocess import glob import shutil import xml.etree.ElementTree as ET from configobj import ConfigObj # load config file CONFIG_FILE = os.path.realpath( os.path.join( os.path.dirname( os.path.dirname( __file__) ), "TrovaWrapper_config.ini" ) ) if not os.path.exists( CONFIG_FILE ) : msg = str('The TrovaWrapper config.ini file "' + CONFIG_FILE + '" is missing. Analysis cannot proceed...') #LogAndEmail( msg, 4) logging.critical( msg ) sys.exit() #print "CONFIG_FILE: " + CONFIG_FILE + "\n" #Config = ConfigParser.ConfigParser() Config = ConfigObj(CONFIG_FILE, raise_errors=True) # load global section #for key, value in Config.iteritems() : # print "KEY: " + str(key) + " VALUE: " + str(value) + "\n" # if 'global' in str(key): globalOptions = Config['global'] UNIFLOW_URL = globalOptions['UNIFLOW_URL'] TROVAEMON_ID = globalOptions['TROVAEMON_ID'] TROVAEMON_PWD = globalOptions['TROVAEMON_PWD'] MAX_RETRY = globalOptions['MAX_RETRY'] ''' Returns a list of paths to MiSeq repositories ''' def Repositories() : # get repo info
def save(self, dirname=""):
    """
    Saves the project as a directory named **dirname**.

    If **dirname** is not provided, the **project_save_path** attribute
    should already be set.

    If **project_save_path** is not set, then it will be set to
    **dirname**.
    """
    # Resolve the target directory, remembering it for future saves.
    if dirname == "":
        dirname = self.project_save_path
    else:
        self.project_save_path = dirname

    # Do some error checking
    if dirname == "":
        raise IOError("Cannot save project to empty path.")
    elif os.path.isfile(dirname):
        raise IOError('Cannot save project to directory "%s"; '
                      'file exists.' % dirname)

    logger.info('Starting save of project to "%s"' % dirname)
    if not os.path.isdir(dirname):
        logger.info(' Creating directory "%s"' % dirname)
        os.mkdir(dirname)

    # TODO: We should formalize this dependency at some point and move
    # this import to the top level
    from configobj import ConfigObj
    config = ConfigObj()
    config["Project"] = {}

    # Each experiment is saved into its own subdirectory; names are made
    # filesystem-safe via _encode_name, with a positional fallback.
    exp = {}
    for i, experiment in enumerate(self.experiments):
        if experiment.name is not None and experiment.name != "":
            safename = self._encode_name(experiment.name)
        else:
            safename = "Experiment_%d" % i
        exp[safename] = experiment.save(basename=dirname, dirname=safename)
        logger.info(' Saved experiment "%s" to subdir "%s"' %
                    (experiment.name, safename))
    config["Experiments"] = exp

    if self.active_experiment is not None:
        config["Project"]["active_experiment"] = self.active_experiment.name

    # Contexts are pickled one file per context, and the mapping of safe
    # name -> (original name, pickle file) is recorded in the project file.
    contexts = {}
    for i, ctx in enumerate(self.contexts):
        if ctx.name is not None and ctx.name != "":
            safename = self._encode_name(ctx.name)
        else:
            safename = "Context_%d" % i
        filename = safename + ".pickle"
        contexts[safename] = dict(name=ctx.name, file=filename)
        ctx.save(join(dirname, filename))
        logger.info(' Saved context "%s" to file "%s"' %
                    (ctx.name, filename))
    config["Contexts"] = contexts

    # Finally write the project manifest itself.
    config.filename = join(dirname, self.PROJECT_FILE_NAME)
    logger.info(' Writing project to "%s"' % config.filename)
    config.write()
    logger.info('Finished saving project to "%s"' % dirname)
    return
from flaskskeleton.model import * # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata from flaskskeleton.model import db target_metadata = db.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. from configobj import ConfigObj configobj = ConfigObj('config/dev.config', configspec='config/dev.configspec') alembic_config = config.get_section(config.config_ini_section) config.set_main_option('sqlalchemy.url', configobj['webapp']['database_uri']) alembic_config['sqlalchemy.url'] = configobj['webapp']['database_uri'] def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the
def __init__(self, *args, **kwargs):
    """Initialise the test case: load the no-station test config and set the project path."""
    super(TestTimeseries, self).__init__(*args, **kwargs)
    cwd = os.getcwd()
    cfg_file = os.path.join(cwd, 'tests', 'test_configs', 'test_run_no_sta.cfg')
    self.config = ConfigObj(cfg_file)
    self.project_path = os.path.join(cwd, '..', 'testproject')
def parse_ini_races():
    """Parse EventScripts-style races.ini into ImportedRace settings objects.

    Returns an OrderedDict mapping a filesystem-safe race name to its
    ImportedRace; races without a category are collected and registered
    under the `None` category at the end.
    """
    races = OrderedDict()
    if (CFG_PATH / 'races.ini').isfile():
        imported = ConfigObj(CFG_PATH / 'races.ini')
        no_category = []
        for name, data in imported.items():
            # Collect race-level aliases ('racealias_*') into the global map.
            for alias, value in data.items():
                if alias.startswith('racealias_'):
                    _aliases[alias] = value
            fixed_name = FIX_NAME.sub('', name.lower().replace(' ', '_'))
            settings = races[fixed_name] = ImportedRace(fixed_name, ModuleType.ESS_INI)
            # Commands fired on the various race lifecycle events.
            settings.cmds['preloadcmd'] = data['preloadcmd']
            settings.cmds['roundstartcmd'] = data['roundstartcmd']
            settings.cmds['roundendcmd'] = data['roundendcmd']
            settings.cmds['spawncmd'] = data['spawncmd']
            settings.cmds['deathcmd'] = data['deathcmd']
            settings.cmds['changeintocmd'] = None
            settings.cmds['changefromcmd'] = data['onchange']
            # Numeric/level restrictions; '|' is the ini list separator.
            settings.config['required'] = int(data['required'])
            settings.config['maximum'] = int(data['maximum'])
            settings.config['restrictmap'] = data['restrictmap'].split('|') if data['restrictmap'] else []
            settings.config['restrictitem'] = data['restrictitem'].split('|') if data['restrictitem'] else []
            settings.config['restrictweapon'] = []
            settings.config['restrictteam'] = int(data['restrictteam'])
            settings.config['teamlimit'] = int(data.get('teamlimit', 0))
            settings.config['author'] = data['author']
            settings.config['allowonly'] = data['allowonly'].split('|') if data['allowonly'] else []
            # Per-skill parallel arrays, all '|'-separated.
            skillnames = data['skillnames'].split('|')
            skilldescr = data['skilldescr'].split('|')
            skillcfg = data['skillcfg'].split('|')
            skillneeded = data['skillneeded'].split('|')
            # FIX: map() returns an iterator in Python 3 (this file uses
            # f-strings, so it is Python 3); it was subscripted below as
            # numberoflevels[i], which raised TypeError. Materialize a list.
            numberoflevels = list(map(int, data['numberoflevels'].split('|'))) if '|' in data['numberoflevels'] else [int(data['numberoflevels'])] * len(skillnames)
            skills = settings.config['skills'] = {}
            for i, skill_name in enumerate(skillnames):
                fixed_skill_name = FIX_NAME.sub('', skill_name.lower().replace(' ', '_'))
                settings.strings[fixed_skill_name] = _LanguageString(skill_name)
                settings.strings[f'{fixed_skill_name} description'] = _LanguageString(skilldescr[i].replace(r'\n', ''))
                skill = skills[fixed_skill_name] = {}
                skill['event'] = [skillcfg[i]]
                skill['required'] = [int(skillneeded[i])] * numberoflevels[i]
                # Optional per-level cooldowns; a single value is broadcast
                # across all levels when the counts do not match.
                if 'cooldown' in data[f'skill{i + 1}']:
                    skill['cooldown'] = list(map(lambda x: float(x) if '.' in x else int(x), data[f'skill{i + 1}']['cooldown'].split('|')))
                    if not len(skill['cooldown']) == numberoflevels[i]:
                        skill['cooldown'] = [skill['cooldown'][0]] * numberoflevels[i]
                skill['variables'] = {}
                skill['cmds'] = {}
                skill['cmds']['setting'] = data[f'skill{i + 1}']['setting'].split('|')
                # 'block' entries are translated to an es_xdoblock invocation.
                if 'block' in data[f'skill{i + 1}']:
                    skill['cmds']['cmd'] = 'es_xdoblock ' + data[f'skill{i + 1}']['block']
                else:
                    skill['cmds']['cmd'] = data[f'skill{i + 1}']['cmd']
                skill['cmds']['sfx'] = data[f'skill{i + 1}']['sfx']
                # Maximum skill level: number of settings, else level count.
                count = len(data[f'skill{i + 1}']['setting'].split('|'))
                if count:
                    skill['maximum'] = count
                else:
                    skill['maximum'] = numberoflevels[i]
                # Skill-level aliases go into the same global alias map.
                for alias, value in data[f'skill{i + 1}'].items():
                    if alias.startswith('racealias_'):
                        _aliases[alias] = value
            settings.strings['name'] = _LanguageString(name)
            settings.strings['description'] = _LanguageString(data['desc'].replace(r'\n', ''))
            # A category of '0' (or no category key) means "uncategorized".
            categories = (data['category'].split('|') if data['category'] and not data['category'] == '0' else []) if 'category' in data else []
            if categories:
                for category in categories:
                    if category == '0':
                        no_category.append(settings)
                        continue
                    fixed_category = FIX_NAME.sub('', category.lower().replace(' ', '_'))
                    if fixed_category not in categories_strings:
                        categories_strings[fixed_category] = _LanguageString(category)
                    settings.add_to_category(fixed_category)
            else:
                no_category.append(settings)
        # Register all uncategorized races under the None category.
        for settings in no_category:
            settings.add_to_category(None)
    return races
# Test bootstrap: give the sickbeard module a minimal runtime configuration.
sickbeard.FLATTEN_FOLDERS_DEFAULT = 0
sickbeard.NAMING_PATTERN = ''
sickbeard.NAMING_ABD_PATTERN = ''
sickbeard.NAMING_SPORTS_PATTERN = ''
sickbeard.NAMING_MULTI_EP = 1
sickbeard.PROVIDER_ORDER = ["sick_beard_index"]
# Provider list is serialized as '!!!'-separated provider definition strings.
sickbeard.newznabProviderList = providers.getNewznabProviderList("'Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0!!!NZBs.org|https://nzbs.org/||5030,5040,5060,5070,5090|0|eponly|0|0|0!!!Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040,5060|0|eponly|0|0|0'")
sickbeard.providerList = providers.makeProviderList()
sickbeard.PROG_DIR = os.path.abspath(os.path.join(TESTDIR, '..'))
sickbeard.DATA_DIR = TESTDIR
sickbeard.CONFIG_FILE = os.path.join(sickbeard.DATA_DIR, "config.ini")
sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)
# FIX: attribute was misspelled 'BRANCG', so the branch setting read from the
# config file was stored under a bogus name and sickbeard.BRANCH never set.
sickbeard.BRANCH = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'branch', '')
sickbeard.CUR_COMMIT_HASH = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'cur_commit_hash', '')
sickbeard.GIT_USERNAME = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'git_username', '')
sickbeard.GIT_PASSWORD = sickbeard.config.check_setting_str(sickbeard.CFG, 'General', 'git_password', '', censor_log=True)
sickbeard.LOG_DIR = os.path.join(TESTDIR, 'Logs')
sickbeard.logger.logFile = os.path.join(sickbeard.LOG_DIR, 'test_sickbeard.log')
createTestLogFolder()
sickbeard.CACHE_DIR = os.path.join(TESTDIR, 'cache')
createTestCacheFolder()
sickbeard.logger.initLogging(False, True)
sgroup = parser.add_argument_group("%s" % p.name, p.desc) sgroup.add_argument("--%s" % p.optname, action="store_true", help="Load plugin %s" % p.name) if p.has_opts: p.add_options(sgroup) except NotImplementedError: sys.exit("[-] %s plugin claimed option support, but didn't have it." % p.name) args = parser.parse_args() try: configfile = ConfigObj(args.configfile) except Exception, e: sys.exit("[-] Error parsing config file: " + str(e)) config_args = configfile['MITMf']['args'] if config_args: print "[*] Loading arguments from config file" for arg in config_args.split(' '): sys.argv.append(arg) args = parser.parse_args() #################################################################################################### # Here we check for some variables that are very commonly used, and pass them down to the plugins try: args.ip_address = get_if_addr(args.interface)
# Minimal tests RUN_SLOW_TESTS = False # quick n dirty method to see if internet is on try: _ = urlopen('http://www.google.com', timeout=1) HAS_INTERNET = True except URLError: HAS_INTERNET = False # check if there is a credentials file (should be added to .gitignore) cred_path = os.path.abspath(os.path.join(__file__, "../../..", '.credentials')) if os.path.exists(cred_path): HAS_CREDENTIALS = True try: cred = ConfigObj(cred_path) except ConfigObjError: raise else: HAS_CREDENTIALS = False cred = None def requires_internet(test): # Test decorator msg = 'requires internet' return test if HAS_INTERNET else unittest.skip(msg)(test) def requires_py3(test): # Test decorator
def write_conf(conf_dict, conf_file):
    """Persist *conf_dict* to *conf_file* in INI format via ConfigObj."""
    ini = ConfigObj()
    ini.filename = conf_file
    for key, value in conf_dict.items():
        ini[key] = value
    ini.write()
#!/usr/bin/env python3 '''Tämä on asiakas-esimerkki joka ottaa yhteyttä flaskin socket-palvelimelle''' import subprocess, json, time, datetime, threading, websocket from configobj import ConfigObj config = ConfigObj('/boot/asetukset.txt') def lokita(data, flush=True): with open("/home/pi/batloki", "a") as kirj: aika = str(datetime.datetime.now()) kirj.write(aika + " " + data + "\n") print(aika + " " + data, flush=flush) def kyseleNaapurit( ): #kyselee batmanin näkemät naapurilaitteet # [('00:c0:ca:98:8f:9f', ' 0.500', ' 7.8'), ('00:c0:ca:98:8e:ed', ' 0.190', '35.6')] tamaLaiteMAC = subprocess.getoutput( 'sudo batctl n|grep -o "MAC:.*"|cut -d "/" -f2|cut -d " " -f1') tamaLaiteIP = subprocess.getoutput( 'ifconfig |grep bat0 -A1|tail -n 1|grep -o "inet.*" | cut -d " " -f 2' ) #vittu mitä paskaa, fiksaa tää :D naapuritraaka = subprocess.getoutput('sudo batctl n -H').split("\n") jrivi = '{"laite": "' + config.get( "mesh_name" ) + '", "mac": "' + tamaLaiteMAC + '", "ip": "' + tamaLaiteIP + '", "data": [' for n in naapuritraaka: nmac = n[0:17] nviive = n[19:26] nteho = n[36:40] if len(nmac) > 0: jrivi += '{"mac": "' + nmac + '", "viive": "' + nviive + '", "teho": "' + nteho + '"},'
def test_configfile(self):
    """The bundled mitmf.conf must parse without raising."""
    from configobj import ConfigObj
    ConfigObj('config/mitmf.conf')
def _get_default_config(): '''Create the default configuration object.''' _default_config = ConfigObj() _default_config.merge({ 'scale_factor': 1., 'iter_list': ['g_AMPA_total', 'g_GABA_total'], 'output_dir': 'panels/', 'grids_data_root': 'simulation_data/main_network/grids', 'bump_data_root': 'simulation_data/main_network/gamma_bump', 'vel_data_root': 'simulation_data/main_network/velocity', 'const_pos_data_root': 'simulation_data/main_network/const_position', 'singleDataRoot': 'simulation_data/main_network/single_neuron', 'connection_data_root': 'simulation_data/main_network/connections', 'even_shape': (31, 31), 'noise_sigmas': [0, 150, 300], # Sections 'mpl': { 'font.size': 11, 'pdf.fonttype': 42, 'mathtext.default': 'regular', 'font.sans-serif': ['Helvetica', 'Avant Garde', 'Computer Modern Sans serif'], 'xtick.major.size': tick_len, 'xtick.major.width': tick_width, 'xtick.minor.size': tick_len / 2., 'xtick.minor.width': tick_width, 'xtick.direction': 'out', 'ytick.major.size': tick_len, 'ytick.major.width': tick_width, 'ytick.minor.size': tick_len / 2., 'ytick.minor.width': tick_width, 'ytick.direction': 'out', }, 'sweeps': { 'fig_size': (3.7, 2.6), # inches 'bbox': (0.08, 0.2, .72, .65), # l, b, w, h 'transparent': True, 'grid_contours': [.5], 'contours_kwargs': { 'hold': True, 'colors': 'k', 'linewidths': [1.5] }, }, 'grids': { 'example_rc': ((5, 15), (15, 5)), 'example_idx': [(5, 15), (5, 15), (5, 15)], # (row, col) 'ntrials': 3, }, 'gamma': { 'example_rc': ((5, 15), (15, 5)), }, 'bumps': { 'n_trials': 5, }, 'p_bumps': { 'frac_total_text': 'P(bumps)' }, 'bump_sigma': { 'sigma_bump_text': '$\sigma_{bump}^{-1}\ (neurons^{-1})$', }, 'seizures': { 'thetaT': 125., # ms 'sig_dt': .5 # ms }, 'vel_rasters': { 'tLimits': [2e3, 3e3], # ms 'trialNum': 0, 'ylabelPos': -0.22, }, }) ############################################################################## GridSweepsPlotter_config = { 'cbar': [0, 0, 1], 'cbar_kw': { 'label': 'Gridness score', 'location': 'right', 
'shrink': 0.8, 'pad': -0.05, 'ticks': ti.MultipleLocator(0.5), 'rasterized': True }, 'sigma_title': True, 'vmin': -0.5, 'vmax': 1.111, 'xlabel': [None, None, None], 'xticks': [True, True, True], 'ylabel': [None, '', ''], 'yticks': [True, False, False], 'ann': [ dict(txt='b', rc=_default_config['grids']['example_rc'][0], xytext_offset=(1.5, 1), color='black'), dict(txt='a', rc=_default_config['grids']['example_rc'][1], xytext_offset=(0.5, 1.5), color='black') ], 'plot_contours': [0, 0, 0], } _default_config['GridSweepsPlotter'] = GridSweepsPlotter_config ############################################################################## GridExamplesPlotter_config = { 'fig_size': (1, 1.2), 'ax_box': (0.01, 0.01, 0.99, 0.85), # l, b, r, t 'transparent': True, 'population_type': 'E', } _default_config['GridExamplesPlotter'] = GridExamplesPlotter_config ############################################################################## GridExampleRectPlotter_config = { 'cbar_kw': { 'label': 'Gridness score', 'location': 'right', 'shrink': 0.8, 'pad': -0.05, 'ticks': ti.MultipleLocator(0.5), 'rasterized': True }, 'vmin': -0.505, 'vmax': 1.111, 'fig_saver': PdfOutputSaver(None, 'pdf') } _default_config['GridExampleRectPlotter'] = GridExampleRectPlotter_config ########################################################################## SpatialInfoPlotter_config = { 'cbar': [0, 0, 0], 'cbar_kw': { 'label': 'Information (bits/spike)', 'location': 'right', 'shrink': 0.8, 'pad': -0.15, 'ticks': ti.MultipleLocator(0.5), 'rasterized': True }, 'sigma_title': True, 'vmin': 0.14, 'vmax': 2.66, 'xlabel': ['', '', ''], 'xticks': [False, False, False], 'ylabel': [None, '', ''], 'yticks': [True, False, False], 'plot_contours': [1, 1, 1], } _default_config['SpatialInfoPlotter'] = SpatialInfoPlotter_config ########################################################################## SpatialSparsityPlotter_config = { 'cbar': [0, 0, 0], 'cbar_kw': { 'label': 'Sparsity', 'location': 'right', 'shrink': 
0.8, 'pad': -0.15, 'ticks': ti.MultipleLocator(0.2), 'rasterized': True }, 'sigma_title': True, 'vmin': 0.12, 'vmax': 0.89, 'xlabel': ['', '', ''], 'xticks': [False, False, False], 'ylabel': [None, '', ''], 'yticks': [True, False, False], 'plot_contours': [1, 1, 1], } _default_config['SpatialSparsityPlotter'] = SpatialSparsityPlotter_config ############################################################################## GridnessCorrelationPlotter_config = { 'fig_size': (3.5, 1.5), 'bbox_rect': (0.2, 0.35, 0.95, .95), } _default_config[ 'GridnessCorrelationPlotter'] = GridnessCorrelationPlotter_config ############################################################################## GridsDiffSweep_config = { 'cbar_kw': dict(label='$\Delta_{150 - 0}$(Gridness score)', location='right', shrink=0.8, pad=-0.05, ticks=ti.MultipleLocator(0.5), rasterized=True) } _default_config['GridsDiffSweep'] = GridsDiffSweep_config ############################################################################## GridDetailedNoisePlotter_config = { 'legend': ['a', 'b'], 'legend_kwargs': dict( loc=(0.8, 1), fontsize='small', frameon=False, numpoints=1, handletextpad=0.05, ) } _default_config[ 'GridDetailedNoisePlotter'] = GridDetailedNoisePlotter_config ############################################################################## GridDetailedNoisePlotter_config = { 'legend': ['a', 'b'], 'legend_kwargs': dict( loc=(0.8, 1), fontsize='small', frameon=False, numpoints=1, handletextpad=0.05, ) } _default_config[ 'GridDetailedNoisePlotter'] = GridDetailedNoisePlotter_config ############################################################################## GammaDetailedNoisePlotter_config = { 'legend': ['a', 'b'], 'legend_kwargs': dict( loc=(0.85, 0.7), fontsize='small', frameon=False, numpoints=1, handletextpad=0.05, ) } _default_config[ 'GammaDetailedNoisePlotter'] = GammaDetailedNoisePlotter_config ############################################################################## VmExamplesPlotter_config 
= { 'fig_size': (2.5, 1.25), 'ax_rect': (0.01, 0.01, 0.999, 0.6), # l, b, r, t } _default_config['VmExamplesPlotter'] = VmExamplesPlotter_config ############################################################################## ConnectionFunctionPlotter_config = { 'fig_size': (3, 1.5), 'bbox_rect': (.2, .25, .95, .75), 'uniform_random': False, 'leg1_kwargs': dict(loc=(.6, .9), frameon=False, fontsize='x-small', ncol=1), 'leg2_kwargs': dict(loc=(0.45, 1.03), frameon=False, fontsize='x-small'), } _default_config[ 'ConnectionFunctionPlotter'] = ConnectionFunctionPlotter_config ############################################################################## GammaSweepsPlotter_config = { 'scale_factor': .9, 'cbar': [1, 0, 0], 'cbar_kw': { # This has to match cbar_kw-s below 'location': 'left', }, 'AC_cbar_kw': dict( location='left', ticks=ti.MultipleLocator(0.3), fraction=0.25, shrink=0.8, pad=.2, labelpad=8, label='$1^{st}$ autocorrelation\npeak', rasterized=True, ), 'AC_xticks': [False] * 3, 'AC_yticks': [1, 0, 0], 'AC_sigma_title': True, 'AC_vmin': -0.09, 'AC_vmax': 0.675, 'F_cbar_kw': dict(location='left', ticks=ti.MultipleLocator(30), fraction=0.25, shrink=0.8, pad=.2, labelpad=8, label='Oscillation\nfrequency (Hz)', extend='max', extendfrac=0.1, rasterized=True), 'F_xticks': [True] * 3, 'F_yticks': [1, 0, 0], 'F_sigma_title': False, 'F_vmin': 30, 'F_vmax': 120, 'ann': [ dict( txt='b', rc=None, xytext_offset=(1.5, 0), color='white', ), dict( txt='a', rc=None, xytext_offset=(-.5, 2.), color='white', ), ], 'plot_grid_contours': [0, 1, 0], } _default_config['GammaSweepsPlotter'] = GammaSweepsPlotter_config tmp = GammaSweepsPlotter_config _default_config['GammaSweepsPlotter']['ann'][0]['rc'] = \ _default_config['gamma']['example_rc'][0] _default_config['GammaSweepsPlotter']['ann'][1]['rc'] = \ _default_config['gamma']['example_rc'][1] _default_config['GammaSweepsPlotter'].update({ 'annF': deepcopy(tmp['ann']), }) 
############################################################################## GammaExamplePlotter_config = { # index0: noise_sigma # index1: example index 'xscales': [ [0, 0, 0], [0, 0, 1], ], 'sigma_titles': [ [0, 0, 0], [1, 1, 1], ], 'xscale_kw': dict(scaleLen=50, x=0.75, y=-0.07, size='x-small'), 'yscale_kw': [[ dict(scaleLen=5, unitsText='nA', x=.5, y=.1, size='x-small'), dict(scaleLen=0.5, unitsText='nA', x=.5, y=.05, size='x-small'), dict(scaleLen=0.5, unitsText='nA', x=.5, y=.05, size='x-small') ], [ dict(scaleLen=5, unitsText='nA', x=.5, y=.1, size='x-small'), dict(scaleLen=0.5, unitsText='nA', x=.5, y=.05, size='x-small'), dict(scaleLen=0.5, unitsText='nA', x=.55, y=0, size='x-small') ]], } _default_config['GammaExamplePlotter'] = GammaExamplePlotter_config ############################################################################## GammaScatterAllPlotter_config = { 'fig_size': (4.2, 2), 'dot_size': 6, 'legend_kwargs': dict(loc=(0.9, 0.4), fontsize='small', frameon=False, numpoints=1, title='$\sigma$ (pA)'), 'bbox_rect': (.1, .35, .95, .85), 'ylabel': '', } _default_config['GammaScatterAllPlotter'] = GammaScatterAllPlotter_config ############################################################################## GammaFreqGridsScatterAllPlotter_config = { 'fig_size': (4.2, 2), 'dot_size': 6, 'legend_kwargs': dict(loc=(0.8, 0.4), fontsize='small', frameon=False, numpoints=1, title='$\sigma$ (pA)'), 'bbox_rect': (.1, .35, .95, .85), 'ylabel': '', 'yticks': True, } _default_config[ 'GammaFreqGridsScatterAllPlotter'] = GammaFreqGridsScatterAllPlotter_config ############################################################################## GammaScatterPBumpsAllPlotter_config = { 'fig_size': (4.5, 2.6), 'bbox_rect': (0.3, 0.22, 0.82, 0.95), 'xlabel': '', 'legend_kwargs': dict(loc=(1.05, 0.5), fontsize='small', frameon=False, title='$\sigma$ (pA)'), } _default_config[ 'GammaScatterPBumpsAllPlotter'] = GammaScatterPBumpsAllPlotter_config 
############################################################################## GammaPBumpsProbabilityPlotter_config = { 'fig_size': (2.7, 2.7), # inches 'bbox_rect': (0.25, 0.2, 0.95, 0.9), } _default_config[ 'GammaPBumpsProbabilityPlotter'] = GammaPBumpsProbabilityPlotter_config ############################################################################## GammaFreqPBumpsProbabilityPlotter_config = { 'fig_size': (2.7, 2.7), # inches 'bbox_rect': (0.25, 0.2, 0.95, 0.9), } _default_config[ 'GammaFreqPBumpsProbabilityPlotter'] = GammaFreqPBumpsProbabilityPlotter_config ############################################################################## GammaGridsProbabilityPlotter_config = { 'fig_size': (2.7, 2.7), # inches 'bbox_rect': (0.25, 0.2, 0.95, 0.9), 'title_size': 'medium', } _default_config[ 'GammaGridsProbabilityPlotter'] = GammaGridsProbabilityPlotter_config ############################################################################## GammaFreqGridsProbabilityPlotter_config = { 'fig_size': (2.7, 2.7), # inches 'bbox_rect': (0.25, 0.2, 0.95, 0.9), 'title_size': 'x-small', } _default_config[ 'GammaFreqGridsProbabilityPlotter'] = GammaFreqGridsProbabilityPlotter_config ############################################################################## fracTotalText = _default_config['p_bumps']['frac_total_text'] FracTotalSweepAnnPlotter_config = { 'scale_factor': .8, 'cbar': (1, 0, 0), 'cbar_kw': dict(label=fracTotalText, location='left', shrink=0.8, pad=0.25, ticks=ti.MultipleLocator(0.5), rasterized=True) } _default_config[ 'FracTotalSweepAnnPlotter'] = FracTotalSweepAnnPlotter_config ############################################################################## MainBumpFormationPlotter_config = { 'scale_factor': 1., 'cbar': [0, 0, 1], 'cbar_kw': dict(label="P(bumps)", location='right', shrink=0.8, pad=-.05, ticks=ti.MultipleLocator(0.5), rasterized=True), 'xticks': [True] * 3, 'plot_grid_contours': [1, 1, 1], } _default_config[ 'MainBumpFormationPlotter'] = 
MainBumpFormationPlotter_config ############################################################################## _default_config['MainIsBumpPlotter'] = FracTotalSweepAnnPlotter_config ############################################################################## _default_config['IsBumpPlotter'] = FracTotalSweepAnnPlotter_config ############################################################################## IsBumpExamplePlotter_config = { 'bumpQualityX': -.9, 'rateColors': ['k', 'k', 'k', 'k', 'k', 'k', 'k', 'k', 'k', 'k'], #'rateColors': ['k', 'yellow', 'yellow', 'yellow', 'yellow', 'yellow', # 'yellow', 'yellow', 'yellow', 'yellow'], 'cbar_fig_size': (0.6, 0.6), } _default_config['IsBumpExamplePlotter'] = IsBumpExamplePlotter_config ############################################################################## MainScatterGridsBumpsPlotter_config = { 'fig_size': (4.5, 2.6), 'bbox_rect': (0.3, 0.22, 0.82, 0.95), 'xlabel': '', 'legend': False, 'legend_kwargs': dict(loc=(1.05, 0.5), fontsize='small', frameon=False, handletextpad=0, title='$\sigma$ (pA)'), } _default_config[ 'MainScatterGridsBumpsPlotter'] = MainScatterGridsBumpsPlotter_config ############################################################################## BumpDriftAtTimePlotter_config = { 'scale_factor': .8, 'cbar_kw': dict(label='Average bump drift\n(neurons)', location='right', shrink=0.8, pad=-0.05, ticks=ti.MultipleLocator(10), rasterized=True), 'plot_grid_contours': [1, 1, 1], } _default_config['BumpDriftAtTimePlotter'] = BumpDriftAtTimePlotter_config ############################################################################## BumpDiffAtInitPlotter_config = { 'cbar_kw': dict(label='Distance from init\nposition (neurons)', location='right', shrink=0.8, pad=-0.05, ticks=ti.MultipleLocator(10), rasterized=True) } _default_config['BumpDiffAtInitPlotter'] = BumpDiffAtInitPlotter_config ############################################################################## BumpDiffResetPlotter_config = { 
'scale_factor': .8, 'cbar_kw': dict(label='Distance from reset\nposition (neurons)', location='right', shrink=0.8, pad=-0.05, ticks=ti.MultipleLocator(5), rasterized=True), 'plot_grid_contours': [1, 1, 1], } _default_config['BumpDiffResetPlotter'] = BumpDiffResetPlotter_config ############################################################################## MaxPopulationFRSweepsPlotter_config = { 'cbar': [1, 0, 0], 'cbar_kw': dict(label="$E-rate_{max}$ (Hz)", location='left', shrink=0.8, pad=0.25, ticks=ti.MultipleLocator(100), rasterized=True), 'plot_grid_contours': [1, 1, 1], 'grid_contours': [.5], } _default_config[ 'MaxPopulationFRSweepsPlotter'] = MaxPopulationFRSweepsPlotter_config ############################################################################## BumpSigmaSweepPlotter_config = { 'cbar': [0, 0, 1], 'cbar_kw': dict(label=_default_config['bump_sigma']['sigma_bump_text'], location='right', shrink=0.8, pad=-0.05, ticks=ti.MultipleLocator(0.2), rasterized=True) } _default_config['BumpSigmaSweepPlotter'] = BumpSigmaSweepPlotter_config ############################################################################## BumpExamplePlotter_config = { 'bbox': (0.01, 0.01, 0.99, 0.82), } _default_config['BumpExamplePlotter'] = BumpExamplePlotter_config ############################################################################## EIRasterPlotter_config = { 'fig_size': (3, 1.9), 'fig_ext': 'pdf', 'yticks': [1, 0, 0], 'ylabelPos': -0.35, 'scaleBar': [None, None, 25], 'scaleX': .85, 'scaleY': -.1, } _default_config['EIRasterPlotter'] = EIRasterPlotter_config ############################################################################## EIRatePlotter_config = { 'fig_size': (3, .65), 'rateTop': .9, 'ylabelPos': -0.35, } _default_config['EIRatePlotter'] = EIRatePlotter_config ############################################################################## MaxMeanThetaFRSweepPlotter_config = { 'cbar_kw': dict( label="max(E rate)/$\\theta$ cycle (Hz)", location='left', 
shrink=0.8, pad=0.25, ticks=ti.MultipleLocator(100), #ticks = ti.LogLocator(base=4), #format = ti.LogFormatter(4), rasterized=True) } _default_config[ 'MaxMeanThetaFRSweepPlotter'] = MaxMeanThetaFRSweepPlotter_config ############################################################################## PSeizureSweepPlotter_config = { 'FRThreshold': 300, 'plot_grid_contours': [1, 1, 1], 'grid_contours': [.5], } PSeizureSweepPlotter_config.update({ 'cbar_kw': dict(label="P($E-rate_{{max}}$ > {0})".format( PSeizureSweepPlotter_config['FRThreshold']), location='left', shrink=0.8, pad=0.25, ticks=ti.MultipleLocator(0.5), rasterized=True) }) _default_config['PSeizureSweepPlotter'] = PSeizureSweepPlotter_config ############################################################################## MaxFRGridsProbabilityPlotter_config = { 'fig_size': (2.7, 2.7), # inches 'scale_factor': .85, 'bbox_rect': (0.3, 0.22, 0.92, 0.9), } _default_config[ 'MaxFRGridsProbabilityPlotter'] = MaxFRGridsProbabilityPlotter_config ############################################################################## PSeizureGridsProbabilityPlotter_config = { 'FRThreshold': 300, 'fig_size': (2.7, 2.7), # inches 'scale_factor': .85, 'bbox_rect': (0.3, 0.22, 0.92, 0.9), } _default_config[ 'PSeizureGridsProbabilityPlotter'] = PSeizureGridsProbabilityPlotter_config ############################################################################## PSeizureGridsScatterAllPlotter_config = { 'FRThreshold': 300, 'fig_size': (2.5, 2.2), # inches 'bbox_rect': (0.3, 0.23, 0.92, 0.9), 'tight_layout_kwargs': { 'pad': .2, }, 'legend_kwargs': dict(loc=(0.5, 0.6), fontsize='small', frameon=False, numpoints=1, title='$\sigma$ (pA)'), } _default_config[ 'PSeizureGridsScatterAllPlotter'] = PSeizureGridsScatterAllPlotter_config ############################################################################## MaxFRGridsScatterAllPlotter_config = { 'fig_size': (2.5, 2.2), # inches 'bbox_rect': (0.3, 0.23, 0.92, 0.9), 'tight_layout_kwargs': { 
'pad': .2, }, 'plot_legend': False, 'legend_kwargs': dict(loc=(0.6, 0.5), fontsize='small', frameon=False, numpoints=1, title='$\sigma$ (pA)'), } _default_config[ 'MaxFRGridsScatterAllPlotter'] = MaxFRGridsScatterAllPlotter_config ############################################################################## MaxStdThetaFRSweepPlotter_config = { 'cbar_kw': dict(label="max(E rate)/$\\theta$ cycle (Hz)", location='left', shrink=0.8, pad=0.25, ticks=ti.MaxNLocator(4), rasterized=True) } _default_config[ 'MaxStdThetaFRSweepPlotter'] = MaxStdThetaFRSweepPlotter_config ############################################################################## _default_config[ 'MaxMedianThetaFRSweepPlotter'] = MaxStdThetaFRSweepPlotter_config ############################################################################## VelSlopeSweepPlotter_config = { 'scale_factor': .8, 'vmin': -.472, 'vmax': 1.353, 'cbar': [0, 0, 1], 'cbar_kw': dict( location='right', shrink=0.8, pad=-0.1, label='Slope\n(neurons/s/pA)', ticks=ti.MultipleLocator(0.4), ), 'plot_contours': [1, 1, 1], } _default_config['VelSlopeSweepPlotter'] = VelSlopeSweepPlotter_config ############################################################################## VelFitErrSweepPlotter_config = { 'scale_factor': .8, 'cbar': [0, 0, 1], 'cbar_kw': dict(label='Fit error (neurons/s)', location='right', shrink=0.8, pad=-0.1, ticks=ti.MultipleLocator(2), rasterized=True), 'ylabel': [None, '', ''], 'yticks': [1, 0, 0], 'plot_contours': [1, 1, 1], 'vmin': 0, 'vmax': 11.2, } _default_config['VelFitErrSweepPlotter'] = VelFitErrSweepPlotter_config ############################################################################## VelLinesPlotter_config = { 'scale_factor': .8, 'fig_size': (3., 2), 'bbox_rect': (0.4, 0.35, 0.95, 0.65), 'positions': ((5, 15), (5, 15), (5, 15)), 'ivel_range': 11, 'g_ann': False, } _default_config['VelLinesPlotter'] = VelLinesPlotter_config ############################################################################## 
VelFitStdSweepPlotter_config = { 'scale_factor': .7, 'cbar_kw': dict(location='right', label='Mean $\sigma_{spd}$ (neurons/s)', shrink=0.8, pad=0.05, ticks=ti.MultipleLocator(5), extend='max', extendfrac=0.1) } _default_config['VelFitStdSweepPlotter'] = VelFitStdSweepPlotter_config ############################################################################## VelocityRasterPlotter_config = { 'fig_size': (3.75, 2.2), 'transparent': True, 'bbox': (0.2, 0.2, 0.99, 0.8) } _default_config['VelocityRasterPlotter'] = VelocityRasterPlotter_config ############################################################################## VelocityRatePlotter_config = { 'fig_size': (3.75, 1), 'bbox': (.2, .2, .99, 0.70), 'transparent': True, } _default_config['VelocityRatePlotter'] = VelocityRatePlotter_config ############################################################################## VelocityRasterZoomPlotter_config = { 'fig_size': (3.75 * .75, 1.2), 'ylabelPos': -0.22, 'bbox': (0.2, 0.25, 0.99, 0.95), 'transparent': True, } _default_config[ 'VelocityRasterZoomPlotter'] = VelocityRasterZoomPlotter_config ############################################################################## ThetaSignalPlotter_config = { 'fig_size': (3, .5), 'T': .5e3, # ms 'bbox': (0, .05, 1., .95), # l, b, r, t 'color': (0, 0, 0, .3), } _default_config['ThetaSignalPlotter'] = ThetaSignalPlotter_config ############################################################################## PACExamplePlotter_config = { 'fig_size': (5, 3.5), 'bbox': (0, .05, 1., .95), # l, b, r, t 'letter_xy': (0, 1.), 'theta_color': 'k', 'gamma_color': 'b', } _default_config['PACExamplePlotter'] = PACExamplePlotter_config ############################################################################## RasterExamplePlotter_config = { 'fig_size': (6.2, 8.3), 'sweep_rect': (.12, .73, .45, .95), 'cbar_kw': dict(label="Mean $E-rate_{max}^{\\theta}$ (Hz)", location='right', shrink=0.8, pad=.05, ticks=ti.MultipleLocator(250), rasterized=True), 
'FRThreshold': 300., 'ylabelPos': -0.1, 'markersize': 1.5, 'plot_ann_txt': True, 'theta_color': (0, 0, 0, .3), 'fig_saver': SeparateMultipageSaver(None, 'pdf') } _default_config['RasterExamplePlotter'] = RasterExamplePlotter_config ############################################################################## ScatterGammaGridsSeparatePlotter_config = { 'fig_size': (5., 6.7), #'bbox_rect': (0.12, 0.17, 0.98, 0.92), } _default_config[ 'ScatterGammaGridsSeparatePlotter'] = ScatterGammaGridsSeparatePlotter_config ############################################################################## ScatterGammaFGridsSeparatePlotter_config = { 'fig_size': (5., 6.7), #'bbox_rect': (0.12, 0.17, 0.98, 0.92), } _default_config[ 'ScatterGammaFGridsSeparatePlotter'] = ScatterGammaFGridsSeparatePlotter_config ############################################################################## GridsPBumpsProbabilityPlotter_config = { 'fig_size': (2.7, 2.7), # inches 'bbox_rect': (0.25, 0.2, 0.95, 0.9), 'title_size': 'medium', } _default_config[ 'GridsPBumpsProbabilityPlotter'] = GridsPBumpsProbabilityPlotter_config ############################################################################## GridBumpScatterPlotter_config = { 'fig_size': (8.27, 11.69), 'color_box_width': .165 } GridBumpScatterPlotter_config.update({ 'color_box_coords': { 'left': 0.14, # w = 0.165 'bottom': .85, 'right': .14 + GridBumpScatterPlotter_config['color_box_width'], 'top': .95 } #'bbox_rect': (0.12, 0.17, 0.98, 0.92), }) _default_config['GridBumpScatterPlotter'] = GridBumpScatterPlotter_config ############################################################################## GridSimpleExamplePlotter_config = { 'fig_size': (5.4, 2.5), 'transparent': True, 'ns_idx': 0, 'rc': (25, 2), 'trial_no': 0, } _default_config[ 'GridSimpleExamplePlotter'] = GridSimpleExamplePlotter_config ############################################################################## Burak2009ConnectionPlotter_config = { 'fig_size': (2, 2), } 
_default_config[ 'Burak2009ConnectionPlotter'] = Burak2009ConnectionPlotter_config ############################################################################## FRSweepPlotter_config = { 'scale_factor': .8, 'cbar_kw': { 'location': 'right', # This has to match cbar_kw_e and cbar_kw_i }, 'plot_grid_contours': [1, 1, 1], 'cbar_kw_e': { 'label': 'Mean E Firing rate (Hz)', 'location': 'right', 'shrink': 0.8, 'pad': -0.05, 'ticks': ti.LogLocator(subs=[1, 2, 4, 6, 8]), 'rasterized': True, }, 'cbar_kw_i': { 'label': 'Mean I Firing rate (Hz)', 'location': 'right', 'shrink': 0.8, 'pad': -0.05, 'ticks': ti.LogLocator(subs=[1, 2, 4, 6, 8]), 'rasterized': True, }, } _default_config['FRSweepPlotter'] = FRSweepPlotter_config ############################################################################## ScatterGridsFRAllPlotter_config = { 'fig_size': (4.2, 3), 'dot_size': 6, 'legend_kwargs': dict(loc=(0.4, 0.6), fontsize='small', frameon=False, numpoints=1, title='$\sigma$ (pA)'), 'bbox_rect': (.2, .2, .95, .95), 'ylabel': 'Gridness score', 'yticks': True, } _default_config[ 'ScatterGridsFRAllPlotter'] = ScatterGridsFRAllPlotter_config ########################################################################### GridsVelFitErrProbabilityPlotter_config = { 'fig_size': (2.7, 2.7), # inches 'bbox_rect': (0.25, 0.2, 0.95, 0.9), 'title_size': 'medium', 'data_range': [[0, 11.2], [-.5, 1.2]], } _default_config[ 'GridsVelFitErrProbabilityPlotter'] = GridsVelFitErrProbabilityPlotter_config ########################################################################### _default_config.update({ 'WeightOutE2IPlotter': { 'fig_size': (1.75, 1.75), 'g_idx': 15, 'neuron_idx': 527, 'use_title': False, }, 'WeightOutI2EPlotter': { 'fig_size': (1.75, 1.75), 'g_idx': 15, 'neuron_idx': 527, 'use_title': False, }, 'WeightInE2IPlotter': { 'fig_size': (1.75, 1.75), 'g_idx': 15, 'neuron_idx': 527, 'use_title': False, }, 'WeightInI2EPlotter': { 'fig_size': (1.75, 1.75), 'g_idx': 15, 'neuron_idx': 527, 
'use_title': False, }, }) _default_config['WeightGridPlotter'] = { 'fig_size': (3, 3), 'cbar_fig_size': (1, 0.5), 'bbox_rect': (.1, .1, .9, .9), 'g_idx': 15, 'neuron_idx': 527, } _default_config['GridExampleColorbarPlotter'] = { 'fig_size': (0.6, 0.8), } _default_config['BumpExampleColorbarPlotter'] = { 'fig_size': (0.6, 0.6), } ########################################################################### _default_config['HighGridScoreFraction'] = { 'threshold': .5, } ############################################################################## return _default_config
def test_init_config(self):
    """Smoke test: the bot configuration file parses cleanly via ConfigObj."""
    from configobj import ConfigObj
    parsed = ConfigObj('test/bot.conf')
def base_config(config_path: Path, config=None):
    """Build and validate the stream configuration.

    :param config_path: path to the config file on disk; created from
        ``DEFAULT_CONFIG`` if it does not exist yet.
    :param config: optional raw config text; when given it is parsed
        instead of the file at ``config_path``.
    :returns: ``(config, None)`` on success or ``(None, error_message)``
        on parse/validation failure.
    """
    # configspec entries shared by all cameras (global defaults).
    global_params = dict(
        regions='force_list(default=list())',
        webhook_target='string(default="")',
        webhook_header='header(default="", url=webhook_target)',
        webhook_image='boolean(default=yes)',
        webhook_image_type='option("vehicle", "original", default="vehicle")',
        max_prediction_delay='float(default=6)',
        memory_decay='float(default=300)',
        image_format=
        'string(default="$(camera)_screenshots/%y-%m-%d/%H-%M-%S.%f.jpg")',
        sample='integer(default=2)',
        total='integer(default=-1)',
        mmc='boolean(default=no)',
        csv_file='string(default="")',
        jsonlines_file='string(default="")',
    )
    # Per-camera spec: defaults of None mean "inherit the global value".
    camera = dict(
        url='string',
        name='string',
        active='boolean(default=yes)',
        # Overridable
        regions='force_list(default=None)',
        webhook_target='string(default=None)',
        webhook_header='header(default=None, url=webhook_target)',
        webhook_image='boolean(default=None)',
        webhook_image_type='option("vehicle", "original", default=None)',
        max_prediction_delay='float(default=None)',
        memory_decay='float(default=None)',
        image_format='string(default=None)',
        sample='integer(default=None)',
        total='integer(default=None)',
        mmc='boolean(default=None)',
        csv_file='string(default=None)',
        jsonlines_file='string(default=None)',
    )

    def webhook_header_check(value, *args, **kwargs):
        # Custom Validator check for the 'header' spec type: verifies the
        # token against the ParkPow API over the network.
        # NOTE(review): this makes config validation require internet
        # access whenever a token is present.
        token = value.split('Token ')[-1]
        if not token:
            return None
        url = 'https://app.parkpow.com/api/v1/parking-list'
        headers = {'Authorization': f'Token {token}'}
        try:
            response = requests.get(url, headers=headers, timeout=10)
        except (requests.Timeout, requests.ConnectionError):
            raise ValidateError('Please check your internet connection.')
        if response.status_code != 200:
            raise ValidateError('Wrong token.')
        return value

    spec = ConfigObj()
    spec['timezone'] = 'string(default="UTC")'
    spec['version'] = 'integer(default=2)'
    spec['cameras'] = dict(__many__=camera, **global_params)
    # Bootstrap a default config file (CRLF line endings) on first run.
    if not config_path.exists():
        with open(config_path, 'w') as fp:
            fp.write(DEFAULT_CONFIG.replace('\n', '\r\n'))
    try:
        # ConfigObj accepts either a list of lines (raw text) or a filename.
        config = ConfigObj(config.split('\n') if config else str(config_path),
                           configspec=spec,
                           raise_errors=True,
                           indent_type='  ')
        config.newlines = '\r\n'  # For Windows
    except Exception as e:
        logging.error(e)
        return None, str(e)
    validator = Validator({'header': webhook_header_check})
    result = config.validate(validator, preserve_errors=True)
    errors = flatten_errors(config, result)
    if errors:
        error_message = 'Config errors:'
        for section_list, key, error in errors:
            if error is False:
                # flatten_errors reports False for a missing required key.
                error = 'key %s is missing.' % key
            elif key is not None:
                # Append the key so the joined path points at the bad value.
                section_list.append(key)
            section_string = '/'.join(section_list)
            logging.error('%s: %s', section_string, error)
            error = f'{section_string}, param: {key}, message: {error}'
            error_message += f'\n{error}'
        return None, error_message
    return config, None
def test_invalid_lines_with_percents(tmpdir, specpath):
    """A colon-separated line containing '%' placeholders must not parse."""
    config_file = tmpdir.join('config.ini')
    config_file.write('extra: %H:%M\n')
    with pytest.raises(ParseError):
        ConfigObj(str(config_file), configspec=specpath, file_error=True)
def initialize(file=None, logging_level='INFO'):
    """Read the configuration file containing the run's parameters.

    This should be the first call of most (all?) OGGM simulations.

    Parameters
    ----------
    file : str
        path to the configuration file (default: OGGM params.cfg)
    logging_level : str
        set a logging level. See :func:`set_logging_config` for options.
    """

    global IS_INITIALIZED
    global PARAMS
    global PATHS

    set_logging_config(logging_level=logging_level)

    # Default to the params.cfg shipped next to this module.
    if file is None:
        file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                            'params.cfg')

    try:
        cp = ConfigObj(file, file_error=True)
    except (ConfigObjError, IOError) as e:
        log.critical('Config file could not be parsed (%s): %s', file, e)
        sys.exit()

    log.workflow('Using configuration file: %s', file)

    # Paths
    oggm_static_paths()
    PATHS['working_dir'] = cp['working_dir']
    PATHS['dem_file'] = cp['dem_file']
    PATHS['climate_file'] = cp['climate_file']

    # Multiprocessing pool
    PARAMS['use_multiprocessing'] = cp.as_bool('use_multiprocessing')
    PARAMS['mp_processes'] = cp.as_int('mp_processes')

    # Some non-trivial params
    PARAMS['continue_on_error'] = cp.as_bool('continue_on_error')
    PARAMS['grid_dx_method'] = cp['grid_dx_method']
    PARAMS['topo_interp'] = cp['topo_interp']
    PARAMS['use_intersects'] = cp.as_bool('use_intersects')
    PARAMS['use_compression'] = cp.as_bool('use_compression')
    PARAMS['mpi_recv_buf_size'] = cp.as_int('mpi_recv_buf_size')
    PARAMS['use_multiple_flowlines'] = cp.as_bool('use_multiple_flowlines')
    PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
    PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')
    PARAMS['correct_for_neg_flux'] = cp.as_bool('correct_for_neg_flux')
    PARAMS['filter_for_neg_flux'] = cp.as_bool('filter_for_neg_flux')
    PARAMS['run_mb_calibration'] = cp.as_bool('run_mb_calibration')
    PARAMS['rgi_version'] = cp['rgi_version']
    PARAMS['use_rgi_area'] = cp.as_bool('use_rgi_area')
    PARAMS['compress_climate_netcdf'] = cp.as_bool('compress_climate_netcdf')
    PARAMS['use_tar_shapefiles'] = cp.as_bool('use_tar_shapefiles')
    PARAMS['clip_mu_star'] = cp.as_bool('clip_mu_star')

    # Climate
    PARAMS['baseline_climate'] = cp['baseline_climate'].strip().upper()
    PARAMS['baseline_y0'] = cp.as_int('baseline_y0')
    PARAMS['baseline_y1'] = cp.as_int('baseline_y1')
    PARAMS['hydro_month_nh'] = cp.as_int('hydro_month_nh')
    PARAMS['hydro_month_sh'] = cp.as_int('hydro_month_sh')
    PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
    PARAMS['tstar_search_glacierwide'] = cp.as_bool('tstar_search_glacierwide')
    # List-valued entries are converted element-wise.
    k = 'temp_local_gradient_bounds'
    PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
    k = 'tstar_search_window'
    PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
    PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')

    # Inversion
    k = 'use_shape_factor_for_inversion'
    PARAMS[k] = cp[k]

    # Flowline model
    k = 'use_shape_factor_for_fluxbasedmodel'
    PARAMS[k] = cp[k]

    # Make sure we have a proper cache dir
    from oggm.utils import download_oggm_files, get_demo_file
    download_oggm_files()

    # Delete non-floats: everything left in `cp` afterwards is assumed to be
    # a float parameter (see the loop below), so every non-float key handled
    # above must be listed here.
    ltr = ['working_dir', 'dem_file', 'climate_file', 'use_tar_shapefiles',
           'grid_dx_method', 'run_mb_calibration', 'compress_climate_netcdf',
           'mp_processes', 'use_multiprocessing', 'baseline_y0', 'baseline_y1',
           'temp_use_local_gradient', 'temp_local_gradient_bounds',
           'topo_interp', 'use_compression', 'bed_shape', 'continue_on_error',
           'use_multiple_flowlines', 'tstar_search_glacierwide',
           'mpi_recv_buf_size', 'hydro_month_nh', 'clip_mu_star',
           'tstar_search_window', 'use_bias_for_run', 'hydro_month_sh',
           'use_intersects', 'filter_min_slope', 'auto_skip_task',
           'correct_for_neg_flux', 'filter_for_neg_flux', 'rgi_version',
           'use_shape_factor_for_inversion', 'use_rgi_area',
           'use_shape_factor_for_fluxbasedmodel', 'baseline_climate']
    for k in ltr:
        cp.pop(k, None)

    # Other params are floats
    for k in cp:
        PARAMS[k] = cp.as_float(k)

    # Read-in the reference t* data - maybe it will be used, maybe not
    fns = ['ref_tstars_rgi5_cru4', 'ref_tstars_rgi6_cru4',
           'ref_tstars_rgi5_histalp',
           'ref_tstars_rgi6_histalp']
    for fn in fns:
        PARAMS[fn] = pd.read_csv(get_demo_file('oggm_' + fn + '.csv'))
        fpath = get_demo_file('oggm_' + fn + '_calib_params.json')
        with open(fpath, 'r') as fp:
            mbpar = json.load(fp)
        PARAMS[fn + '_calib_params'] = mbpar

    # Empty defaults
    set_intersects_db()
    IS_INITIALIZED = True

    # Pre extract cru cl to avoid problems by multiproc
    from oggm.utils import get_cru_cl_file
    get_cru_cl_file()
def __get_value_from_settings(prop_label):
    """Return the value stored under *prop_label* in misc/settings.ini."""
    settings = ConfigObj('misc/settings.ini')
    return settings[prop_label]
def oggm_static_paths():
    """Initialise the OGGM paths from the config file.

    Creates a default config file on first run, validates its keys,
    applies environment-variable overrides, and fills the module-level
    ``PATHS`` and ``PARAMS`` dicts.
    """
    global PATHS, PARAMS

    # See if the file is there, if not create it with sensible defaults
    if not os.path.exists(CONFIG_FILE):
        dldir = os.path.join(os.path.expanduser('~'), 'OGGM')
        config = ConfigObj()
        config['dl_cache_dir'] = os.path.join(dldir, 'download_cache')
        config['dl_cache_readonly'] = False
        config['tmp_dir'] = os.path.join(dldir, 'tmp')
        config['cru_dir'] = os.path.join(dldir, 'cru')
        config['rgi_dir'] = os.path.join(dldir, 'rgi')
        config['test_dir'] = os.path.join(dldir, 'tests')
        config['has_internet'] = True
        config.filename = CONFIG_FILE
        config.write()

    # OK, read in the file
    try:
        config = ConfigObj(CONFIG_FILE, file_error=True)
    except (ConfigObjError, IOError) as e:
        log.critical('Config file could not be parsed (%s): %s',
                     CONFIG_FILE, e)
        sys.exit()

    # Check that all keys are here
    for k in ['dl_cache_dir', 'dl_cache_readonly', 'tmp_dir',
              'cru_dir', 'rgi_dir', 'test_dir', 'has_internet']:
        if k not in config:
            raise RuntimeError('The oggm config file ({}) should have an '
                               'entry for {}.'.format(CONFIG_FILE, k))

    # Override defaults with env variables if available
    if os.environ.get('OGGM_DOWNLOAD_CACHE_RO') is not None:
        ro = bool(strtobool(os.environ.get('OGGM_DOWNLOAD_CACHE_RO')))
        config['dl_cache_readonly'] = ro
    if os.environ.get('OGGM_DOWNLOAD_CACHE') is not None:
        config['dl_cache_dir'] = os.environ.get('OGGM_DOWNLOAD_CACHE')
    if os.environ.get('OGGM_EXTRACT_DIR') is not None:
        # This is for the directories where OGGM needs to extract things
        # On the cluster it might be useful to do it on a fast disc
        edir = os.path.abspath(os.environ.get('OGGM_EXTRACT_DIR'))
        config['tmp_dir'] = os.path.join(edir, 'tmp')
        config['cru_dir'] = os.path.join(edir, 'cru')
        config['rgi_dir'] = os.path.join(edir, 'rgi')

    if not config['dl_cache_dir']:
        # FIX: the original passed a stray second argument (a leaked loop
        # variable `k`) to a format string with a single placeholder.
        raise RuntimeError('At the very least, the "dl_cache_dir" entry '
                           'should be provided in the oggm config file '
                           '({})'.format(CONFIG_FILE))

    # Fill the PATH dict: only keys ending in '_dir' are path entries.
    # FIX: iteritems() is Python 2 only; items() works everywhere.
    for k, v in config.items():
        if not k.endswith('_dir'):
            continue
        PATHS[k] = os.path.abspath(os.path.expanduser(v))

    # Other
    PARAMS['has_internet'] = config.as_bool('has_internet')
    PARAMS['dl_cache_readonly'] = config.as_bool('dl_cache_readonly')

    # Create cache dir if possible (skipped for read-only caches)
    if not os.path.exists(PATHS['dl_cache_dir']):
        if not PARAMS['dl_cache_readonly']:
            os.makedirs(PATHS['dl_cache_dir'])
# NOTE(review): sys.setdefaultencoding() exists only on Python 2 (and only
# after reload(sys)); on Python 3 this line raises AttributeError.
sys.setdefaultencoding("UTF-8")
current_locale_encoding = locale.getlocale()[1].lower()
logger.debug("sys default encoding %s", sys.getdefaultencoding())
logger.debug("After %s", locale.nl_langinfo(locale.CODESET))
# Refuse to run under a non-UTF-8 locale.
if current_locale_encoding not in ["utf-8", "utf8"]:
    logger.error("Need a UTF-8 locale. Currently '%s'. Exiting..." %
                 current_locale_encoding)
    sys.exit(1)

configure_locale()

# loading config file
try:
    config = ConfigObj("/etc/airtime/airtime.conf")
except Exception as e:
    # NOTE(review): ConfigObj without file_error=True does not raise for a
    # missing file; this only catches parse errors.
    logger.error("Error loading config file: %s", e)
    sys.exit(1)


class Global:
    """Thin wrapper around the API client for server-level checks."""

    def __init__(self, api_client):
        # api_client: object exposing is_server_compatible() and test()
        self.api_client = api_client

    def selfcheck(self):
        """Return whether the server is compatible with this client."""
        return self.api_client.is_server_compatible()

    def test_api(self):
        """Exercise the API client's self-test endpoint."""
        self.api_client.test()
def start(args):
    """Tail the uwsgi log and periodically render per-day HTML reports.

    Runs until the tailer is stopped (via SIGINT). State is checkpointed
    through ``SavePoint`` so a restart resumes from the last analyzed
    log timestamp.

    :param args: parsed CLI arguments; ``args.config`` is an open file
        whose name points at the ConfigObj config file.
    """
    # Load config file
    config = ConfigObj(infile=args.config.name)
    data_dir = config['data_dir']
    uwsgi_log_path = config['uwsgi_log_path']
    min_msecs = int(config.get('min_msecs', DEFAULT_MIN_MSECS))
    url_file = config.get('url_file')

    # Load custom url rules
    url_rules = []
    if url_file:
        with open(url_file, 'r') as fp:
            url_rules = parse_url_rules(fp)

    html_dir = os.path.join(data_dir, 'html')
    db_dir = os.path.join(data_dir, 'data')
    makedir_if_none_exists(html_dir)
    makedir_if_none_exists(db_dir)

    save_point = SavePoint(db_dir)
    # Resume from the checkpoint, or start one interval in the past.
    last_log_datetime = save_point.get_last_datetime() or \
        (datetime.datetime.now() -
         datetime.timedelta(seconds=REALTIME_UPDATE_INTERVAL))
    logger.info('Start from last savepoint, last_log_datetime: %s' %
                last_log_datetime)
    last_update_datetime = None

    url_classifier = URLClassifier(user_defined_rules=url_rules)
    analyzer = RealtimeLogAnalyzer(url_classifier=url_classifier,
                                   min_msecs=min_msecs,
                                   start_from_datetime=last_log_datetime)
    file_tailer = Tailer(uwsgi_log_path)
    html_render = HTMLRender(html_dir, domain=config.get('domain'))

    # Listen INT/TERM signal
    def gracefully_exit(*args):
        # Stops the tailer, which ends the for-loop below.
        logger.info('Sinal received, exit.')
        file_tailer.stop_follow()

    signal.signal(signal.SIGINT, gracefully_exit)

    for line in file_tailer:
        # Analyze line
        if line != no_new_line:
            analyzer.analyze_line(line)

        now = datetime.datetime.now()
        # Only render once we've caught up to the end of the file...
        if not file_tailer.trailing:
            continue
        # ...and at most once per REALTIME_UPDATE_INTERVAL seconds.
        if last_update_datetime and \
                total_seconds(now - last_update_datetime) < REALTIME_UPDATE_INTERVAL:
            continue

        # Render HTML file when:
        # - file_tailer reaches end of file.
        # - last_update_datetime if over one `interval` from now

        # Render latest interval HTML file
        html_render.render_requests_data_to_html(
            analyzer.get_data('last_interval'), 'latest_5mins.html',
            context={'datetime_range': 'Last 5 minutes'})
        analyzer.clean_data_by_key('last_interval')

        for date in list(analyzer.data.keys()):
            day_requests_data = RequestsData(date, db_dir)
            merge_requests_data_to(day_requests_data.data,
                                   analyzer.get_data(date))

            # Render to HTML file
            html_render.render_requests_data_to_html(
                day_requests_data.data, 'day_%s.html' % date,
                context={'datetime_range': date})

            # Save data to pickle file
            day_requests_data.save()

            # Reset Everything
            analyzer.clean_data_by_key(date)

        update_html_symlink(html_dir)
        last_update_datetime = now
        if analyzer.last_analyzed_datetime:
            save_point.set_last_datetime(analyzer.last_analyzed_datetime)
            save_point.save()