def get_custom_validator():
    """
    Returns a validator suitable for validating the ShakeMap config files.

    Returns:
        :class:`Validator`: A Validator object.
    """
    # Map the configspec check names onto the module-level check functions.
    custom_checks = {
        'file_type': file_type,
        'directory_type': directory_type,
        'annotatedfloat_type': annotatedfloat_type,
        'gmpe_list': gmpe_list,
        'weight_list': weight_list,
        'extent_list': extent_list,
        'status_string': status_string,
    }
    return Validator(custom_checks)
def __init__(self): self.spec = confspec #: All loaded profiles by name. self._profileCache = {} #: The active profiles. self.profiles = [] #: Whether profile triggers are enabled (read-only). #: @type: bool self.profileTriggersEnabled = True self.validator = Validator() self.rootSection = None self._shouldHandleProfileSwitch = True self._pendingHandleProfileSwitch = False self._suspendedTriggers = None self._initBaseConf() #: Maps triggers to profiles. self.triggersToProfiles = None self._loadProfileTriggers() #: The names of all profiles that have been modified since they were last saved. self._dirtyProfiles = set()
def read_ini(self):
    '''
    Make sure the current file validates OK; this also type-converts
    values.  Recreate the default ini file if bad things happen.

    Raises:
        Exception: when the file still fails validation after retries.
    '''
    retries = 2
    while True:
        try:
            self.config = ConfigObj(self.filename,
                                    file_error=True,
                                    interpolation=False,
                                    configspec=self.spec,
                                    raise_errors=True)
            validator = Validator()
            results = self.config.validate(validator, preserve_errors=True)
            if results != True:
                # Report every problem, then move the bad file aside so the
                # retry path can regenerate a default one.
                error = None
                for (section_list, key, error) in flatten_errors(
                        self.config, results):
                    if key is not None:
                        warn("key '%s' in section '%s' : %s"
                             % (key, ', '.join(section_list),
                                error if error else "missing"))
                    else:
                        warn("section '%s' missing"
                             % ', '.join(section_list))
                badfilename = self.filename + '.BAD'
                os.rename(self.filename, badfilename)
                retries -= 1
                if retries:
                    raise UselessIniError(
                        "cant make sense of '%s', renamed to '%s'"
                        % (self.filename, badfilename))
                else:
                    raise Exception(error)
            # Validation succeeded; stop retrying.
            # NOTE(review): the original loop had no visible exit on the
            # success path (the tail of the function was truncated); a
            # plain return is assumed here -- confirm against upstream.
            return
        # Python 3 syntax: the original used the removed
        # "except (...), msg" form.
        except (IOError, TypeError, UselessIniError) as msg:
            warn("%s - creating default" % (msg))
            self.create_default_ini()
            continue
        except Exception:
            # Pass ConfigObj's other troubles on to the caller.
            raise
def run(configfilename, configspecfile):
    """Interactively define a control annulus and save it to the config.

    Args:
        configfilename: path to the speckle-nulling ConfigObj ini file.
        configspecfile: configspec used to validate/type-convert values.
    """
    hardwareconfigfile = 'speckle_instruments.ini'
    config = ConfigObj(configfilename, configspec=configspecfile)
    val = Validator()
    check = config.validate(val)

    centx = config['IM_PARAMS']['centerx']
    centy = config['IM_PARAMS']['centery']
    lambdaoverd = config['IM_PARAMS']['lambdaoverd']
    regionfilename = config['CONTROLREGION']['filename']
    # pharo = hardware.fake_pharo()  # Real thing
    innerlam = config['CONTROLREGION']['innerannulus']
    outerlam = config['CONTROLREGION']['outerannulus']

    # pharo = hardware.PHARO_COM('PHARO',
    #                            configfile=hardwareconfigfile)
    # LOAD P3K HERE
    pharo = hardware.fake_pharo()
    print("Retrieving bgd, flat, badpix")
    # bgds = flh.setup_bgd_dict(config)
    fake_bgds = {'bkgd': np.zeros((1024, 1024)),
                 'masterflat': np.ones((1024, 1024)),
                 'badpix': np.zeros((1024, 1024))}
    # BUGFIX: these were Python 2 print statements mixed with Python 3
    # print() calls; unified to the function form.
    print("WARNING: USING FAKE BGDS")
    bgds = fake_bgds.copy()

    firstim = pharo.take_src_return_imagedata(exptime=4)
    image = pre.equalize_image(firstim, **bgds)
    # NOTE(review): the equalized image is immediately discarded below and
    # the raw frame is used instead -- confirm which one is intended.
    image = firstim
    ann, verts = define_control_annulus(image, cx=centx, cy=centy,
                                        rad_in=lambdaoverd * innerlam,
                                        rad_out=lambdaoverd * outerlam)
    flh.writeout(ann * 1.0, regionfilename)
    config['CONTROLREGION']['verticesx'] = [centx] + [x[0] for x in verts]
    config['CONTROLREGION']['verticesy'] = [centy] + [y[1] for y in verts]
    config.write()
    print("Configuration file written to " + config.filename)
def load_config(self, config):
    """
    Load the configuration file and append it to local dictionary with the
    content of already loaded options.
    """
    if config not in self.conf_files:
        # just add the config for tracking purposes, someone injected
        # a config file to us
        self.conf_files.append(config)
    # load and validate the initial config against the configspec
    val = Validator()
    configspec = ConfigObj(self.__configspecfilename, _inspec=True)
    cfg = ConfigObj(config, configspec=configspec)
    if not cfg.validate(val):
        print_warning(
            '(none): W: error parsing configuration file: {}'.format(
                config))
    # load multiline defaults
    cfg = self._load_defaults(cfg, DEFAULTS)
    cfg = self._load_defaults(cfg, DICT_DEFAULTS)
    # convert all list items to real lists
    cfg = self._convert_known_lists(cfg, self.known_lists_merged)
    cfg = self._convert_known_lists(cfg, self.known_lists_override, True)
    # for merging we keep a duplicate object without filled-in defaults
    result = ConfigObj(config)
    # convert the duplicate's values to lists too
    result = self._convert_known_lists(result, self.known_lists_merged)
    result = self._convert_known_lists(result, self.known_lists_override,
                                       True)
    # for keys whose lists are merged (not overridden), append the
    # already-loaded values onto the new file's values
    for i in self.known_lists_merged:
        if self.configuration:
            if i in self.configuration and i in result:
                result[i] = result[i] + self.configuration[i]
    # Merge when we already have a config; otherwise the validated object
    # (with defaults filled in) becomes the initial configuration.
    if self.configuration:
        self.configuration.merge(result)
    else:
        self.configuration = cfg
def load(self, config_file):
    """
    Load and validate the configuration from the file.

    Raises:
        ValueError: if the configuration is not valid.
    """
    confspec = [
        '[connector-at]',
        'rt-topic = string()',
        'exchange-name = string(default="navitia")',
        'at-connection-string = string()',
        'broker-url = string()',
        'last-exec-time-file = string(default="./last_exec_time.txt")',
        'jormungandr-url = string()',
        'jormungandr-token = string(default=None)',
        'logger-file = string(default="./connector_at.log")',
        'logger-level = string(default="DEBUG")',
        'sqlalchemy-log-level = string(default="WARN")',
    ]
    config = ConfigObj(config_file, configspec=confspec, stringify=True)
    validation = config.validate(Validator(), preserve_errors=True)
    # validate() returns True on success, or a dict of errors.
    if type(validation) is dict:
        raise ValueError("Config is not valid: "
                         + self.build_error(config, validation))
    section = config['connector-at']
    self.broker_url = section['broker-url']
    self.at_connection_string = section['at-connection-string']
    self.exchange_name = section['exchange-name']
    self.rt_topic = section['rt-topic']
    self.last_exec_time_file = section['last-exec-time-file']
    self.jormungandr_url = section['jormungandr-url']
    self.jormungandr_token = section['jormungandr-token']
    self.logger_file = section['logger-file']
    self.logger_level = section['logger-level']
    self.sqlalchemy_log_level = section['sqlalchemy-log-level']
def float_list_value(v, minl=None, maxl=None, minv=None, maxv=None):
    """
    Validator function for float lists

    Args:
        v: input list
        minl: (optional) minimum list length allowed
        maxl: (optional) maximum list length allowed
        minv: (optional) minimum float value allowed
        maxv: (optional) maximum float value allowed

    Returns:
        Validated float list, i.e. [1.2, 4., 6.7]
    """
    vdt = Validator()
    # Reuse the stock "list" and "float" checks: first coerce the input
    # into a length-checked list, then range-check each member as a float.
    check_list = vdt.functions["list"]
    check_float = vdt.functions["float"]
    validated = []
    for member in check_list(v, minl, maxl):
        validated.append(check_float(member, min=minv, max=maxv))
    return validated
def init_and_validate(self):
    """Validate this config against its configspec, print each failure,
    then apply keyword and custom formatting."""
    for section_list, key, val in flatten_errors(
            self, self.validate(Validator())):
        # TODO add write capability to correct errors and update config file
        if key:
            # A specific key failed its check.
            print('The "{failed_key}" key in the section '
                  '"{failed_section}" failed validation'.format(
                      failed_key=key,
                      failed_section=': '.join(section_list)))
            print(val)
        else:
            # TODO 2: Get the missing section key or use the configspec to add a default which requires user input
            # A whole section is missing a required setting.
            print('The {section_w_o_key} section '
                  'is missing a required setting.'.format(
                      section_w_o_key=': '.join(section_list)))
            print(val)
    else:
        # NOTE(review): this for-else always runs (the loop contains no
        # break), so the success message prints even after errors were
        # reported above -- confirm whether that is intended.
        print(
            'Settings validated for: {f_name}'.format(f_name=self.f_name))
    self.apply_keyword_format()
    self.apply_custom_format(lvl=self)
def load_config(self, file_name=consts.GLOBAL_CONFIG_FILE):
    """
    Loads global configuration file.

    Args:
        file_name: path to the global configuration file.

    Raises:
        TunedException: if the file is missing, unparsable, or invalid.
    """
    log.debug("reading and parsing global configuration file '%s'"
              % file_name)
    try:
        self._cfg = ConfigObj(file_name,
                              configspec=self.global_config_spec,
                              raise_errors=True, file_error=True,
                              list_values=False, interpolation=False)
    except IOError as e:
        # Chain the original exception so the root cause stays visible
        # (the bound name was previously unused).
        raise TunedException(
            "Global tuned configuration file '%s' not found."
            % file_name) from e
    except ConfigObjError as e:
        raise TunedException(
            "Error parsing global tuned configuration file '%s'."
            % file_name) from e
    vdt = Validator()
    if not self._cfg.validate(vdt, copy=True):
        raise TunedException(
            "Global tuned configuration file '%s' is not valid."
            % file_name)
def __init__(self, module_path, hyper_params, use_cuda, mission=1):
    """Build the evaluation harness: dataset, loader, network, validator.

    Args:
        module_path: path to the saved state dict for the resnet.
        hyper_params: dict providing at least "threads" and "batch_size".
        use_cuda: move the network to the GPU when True.
        mission: dataset selector forwarded to get_test_set().
    """
    self.dataset = get_test_set(mission=mission)
    print("test number:", len(self.dataset))
    self.hyper_params = hyper_params
    # shuffle=False keeps the evaluation order deterministic.
    self.data_loader = DataLoader(
        dataset=self.dataset,
        num_workers=self.hyper_params["threads"],
        batch_size=self.hyper_params["batch_size"],
        shuffle=False)
    self.resnet = get_network()
    self.resnet.load_state_dict(torch.load(module_path))
    if use_cuda:
        self.resnet = self.resnet.cuda()
    # Project evaluation helper wrapping the network and data loader.
    self.v = Validator(resnet=self.resnet,
                       hyper_params=hyper_params,
                       use_cuda=use_cuda,
                       data_loader=self.data_loader)
def read_camera_config(target):
    """Read and validate the camera configuration for ``target``.

    Args:
        target: section name in camera.ini identifying the camera.

    Returns:
        tuple: (name, startY, endY, startX, endX) for the camera; exits
        the process with status 1 when validation fails.
    """
    validator = Validator()
    config = ConfigObj('../configurations/camera.ini',
                       configspec='../configurations/camera_configspec.ini')
    # Validator converts types automatically
    result = config.validate(validator)
    if not result:
        # BUGFIX: was a Python 2 print statement (syntax error on py3).
        print('Config file validation failed!')
        sys.exit(1)
    camera_cfg = config[target]
    name = camera_cfg['name']
    startY = camera_cfg['startY']
    endY = camera_cfg['endY']
    startX = camera_cfg['startX']
    endX = camera_cfg['endX']
    return name, startY, endY, startX, endX
def _load_config(self):
    """Read the editor preferences file, validating against an inline
    configspec so every option falls back to a sane default."""
    spec = ConfigObj({
        'use_sphinx': 'boolean(default=False)',
        'sync_on_change': 'boolean(default=True)',
        'font_family': 'string(default=Monospace)',
        'font_point_size': 'integer(default=10)',
        'font_weight': 'integer(default = 50)',
        'font_italic': 'boolean(default=False)',
    })
    path = os.path.join(ETSConfig.application_data, 'rest_editor.conf')
    self.config = ConfigObj(path, configspec=spec, create_empty=True)
    # copy=True fills in defaults for any options missing from the file.
    self.config.validate(Validator(), copy=True)

    # Apply the validated settings.
    self.use_sphinx = self.config['use_sphinx']
    self.sync_on_change = self.config['sync_on_change']
    font = self.default_font
    font.setFamily(self.config['font_family'])
    font.setPointSize(self.config['font_point_size'])
    font.setWeight(self.config['font_weight'])
    font.setItalic(self.config['font_italic'])
    font.setStyleHint(QtGui.QFont.TypeWriter)
def load_config(config_path, configspec_path=None, *args, **kwargs):
    """Load ``config_path``, validating it against ``configspec_path``.

    Extra positional/keyword arguments are forwarded to the ConfigObj
    constructor.

    Returns:
        ConfigObj: the validated config, which is also written back to
        disk with defaults filled in (copy=True).
        NOTE(review): when validation fails the function falls through and
        implicitly returns None -- confirm callers expect that.

    Raises:
        ConfigLoadError: if the file cannot be parsed.
    """
    # if os.path.exists(config_path):
    #     clean_config(config_path)
    spec = ConfigObj(configspec_path, encoding='UTF8', list_values=False,
                     _inspec=True)
    try:
        config = ConfigObj(infile=config_path, configspec=spec,
                           create_empty=True, encoding='UTF8',
                           *args, **kwargs)
    except ParseError:
        raise ConfigLoadError("Unable to load %r" % config_path)
    validator = Validator()
    validated = config.validate(validator, copy=True)
    if validated == True:
        # Persist the defaults that copy=True filled in.
        config.write()
        return config
def main():
    """Parse CLI arguments, validate the proxy list, and return the
    surviving proxies sorted by speed (fastest first).

    Returns:
        list: proxy dicts with "speed" <= 8, sorted ascending by speed.
    """
    arguments = docopt(__doc__, version='1.0.0')
    target = arguments['<target>']
    timeout = int(arguments['--timeout'])
    thread_num = int(arguments['<thread_num>'])
    process_num = int(arguments['<process_num>'])
    print('{} {} {} {}'.format(target, timeout, thread_num, process_num))
    validator = Validator(target, timeout, process_num, thread_num)
    ip_all = []
    logging.info("Load proxy ip, total: %s", len(ip_all))
    result_tmp = validator.run(ip_all)
    # Keep only proxies fast enough to be useful (replaces the original
    # "if speed > 8: pass / else: append" anti-pattern).
    result = [one for one in result_tmp if one["speed"] <= 8]
    logging.info("validator run finished")
    logging.info(len(result))
    result = sorted(result, key=lambda x: x["speed"])
    return result
def create(self):
    """
    Create default configuration file if it doesn't exist.
    """
    # Guard clauses: nothing to do without a spec/target, or if the file
    # is already on disk.
    if not (self.configspec and self.configfile):
        return
    if os.path.exists(self.configfile):
        return
    confdir = os.path.dirname(self.configfile)
    if not os.path.exists(confdir):
        os.makedirs(confdir)
    # Build an empty config from the spec; validating with copy=True
    # fills in every default before the file is written.
    config = ConfigObj(configspec=self.configspec.split('\n'),
                       encoding='UTF8',
                       interpolation=False,
                       list_values=False)
    config.stringify = False
    config.write_empty_values = True
    config.filename = self.configfile
    config.validate(Validator(), preserve_errors=True, copy=True)
    config.write()
    # Owner read/write, group read.
    os.chmod(self.configfile,
             stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP)
def load_config():
    """Locate, load, and validate the user configuration file.

    Returns:
        ConfigObj: the validated configuration.

    Raises:
        ConfigurationException: for missing keys or bad values.
    """
    path = find_config()
    specpath = os.path.join(os.path.dirname(__file__), 'confspec.ini')
    # Register the custom check functions referenced by the configspec.
    custom_checks = {
        'expand_path': expand_path,
        'cache_path': validate_cache_path,
    }
    validator = Validator(custom_checks)
    config = ConfigObj(path, configspec=specpath, file_error=True)
    validation = config.validate(validator, preserve_errors=True)
    for section, key, error in flatten_errors(config, validation):
        if not error:
            # No error object means the key was absent entirely.
            message = ('{} is missing from the {} section of the '
                       'configuration file').format(key, section)
            raise ConfigurationException(message)
        if isinstance(error, VdtValueError):
            raise ConfigurationException(
                'Bad {} setting, {}'.format(key, error.args[0]))
    return config
def __init__(self):
    """Load (or create) the settings file, fill in generated values, and
    merge the result into this ConfigObj-derived instance.

    NOTE(review): this code is Python-2-only as written (it calls the
    ``unicode`` builtin) -- confirm the target interpreter.
    """
    super(ComicStreamerConfig, self).__init__()

    self.csfolder = AppFolders.settings()

    # make sure folder exists
    if not os.path.exists( self.csfolder ):
        os.makedirs( self.csfolder )

    # set up initial values
    self.filename = os.path.join(self.csfolder, "settings")
    self.configspec=io.StringIO(ComicStreamerConfig.configspec)
    self.encoding="UTF8"

    # since some stuff in the configobj has to happen during object initialization,
    # use a temporary delegate, and then merge it into self
    tmp = ConfigObj(self.filename, configspec=self.configspec,
                    encoding=self.encoding)
    validator = Validator()
    tmp.validate(validator, copy=True)

    # set up the install ID
    if tmp['general']['install_id'] == '':
        tmp['general']['install_id'] = uuid.uuid4().hex

    # set up the cookie secret
    if tmp['security']['cookie_secret'] == '':
        tmp['security']['cookie_secret'] = base64.b64encode(
            uuid.uuid4().bytes + uuid.uuid4().bytes)

    # normalize the folder list
    tmp['general']['folder_list'] = [
        os.path.abspath(os.path.normpath(unicode(a)))
        for a in tmp['general']['folder_list']]

    self.merge(tmp)
    # First run: persist the freshly generated settings file.
    if not os.path.exists( self.filename ):
        self.write()

    # not sure if this belongs here:
    # if mac app, and no unrar in path, add the one from the app bundle
    if getattr(sys, 'frozen', None) and platform.system() == "Darwin":
        if which("unrar") is None:
            addtopath(AppFolders.appBase())
def test_reloading_with_an_actual_file(self, request, reloadable_cfg_content,
                                       cfg_contents):
    """reload() must discard every in-memory edit (values, sections, and
    even configspec changes) and restore the on-disk state."""
    # Write the fixture content to a real temporary file and make sure it
    # is removed when the test finishes.
    with NamedTemporaryFile(delete=False, mode='wb') as cfg_file:
        cfg_file.write(reloadable_cfg_content.encode('utf-8'))
    request.addfinalizer(lambda: os.unlink(cfg_file.name))
    configspec = cfg_contents("""
        test1= integer(30,50)
        test2= string
        test3=integer
        test4=float(4.5)
        [section]
            test1=integer(30,50)
            test2=string
            test3=integer
            test4=float(4.5)
            [[sub section]]
                test1=integer(30,50)
                test2=string
                test3=integer
                test4=float(4.5)
        [section2]
            test1=integer(30,50)
            test2=string
            test3=integer
            test4=float(4.5)
        """)
    cfg = ConfigObj(cfg_file.name, configspec=configspec)
    # Mutate everything we can: spec, deleted section/key, extra entries.
    cfg.configspec['test1'] = 'integer(50,60)'
    backup = ConfigObj(cfg_file.name)
    del cfg['section']
    del cfg['test1']
    cfg['extra'] = '3'
    cfg['section2']['extra'] = '3'
    cfg.reload()
    # After reload the object matches the untouched on-disk copy and
    # validates against the (restored) original spec.
    assert cfg == backup
    assert cfg.validate(Validator())
def initGestures():
    """Load and validate the braille-display gestures map, then register
    normalized gesture identifiers for the current braille display.

    Returns:
        tuple: (gesturesFileExists, iniGestures)
    """
    global gesturesFileExists, iniGestures
    if profileFileExists and gesturesBDPath() != '?':
        log.debug('Main gestures map found')
        confGen = gesturesBDPath()
        # Empty configspec: we only want parsing/encoding, not checks.
        confspec = config.ConfigObj(StringIO(""""""), encoding="UTF-8",
                                    list_values=False)
        iniGestures = config.ConfigObj(confGen, configspec=confspec,
                                       indent_type="\t", encoding="UTF-8")
        result = iniGestures.validate(Validator())
        if result is not True:
            log.exception("Malformed configuration file")
            gesturesFileExists = False
        else:
            gesturesFileExists = True
    else:
        if curBD != "noBraille":
            log.warn('No main gestures map (%s) found' % gesturesBDPath(1))
        gesturesFileExists = False
    if gesturesFileExists:
        for g in iniGestures["globalCommands.GlobalCommands"]:
            if isinstance(iniGestures["globalCommands.GlobalCommands"][g],
                          list):
                # Several gestures bound to one command: register each.
                for h in range(
                        len(iniGestures["globalCommands.GlobalCommands"][g])):
                    iniGestures[inputCore.normalizeGestureIdentifier(
                        str(iniGestures["globalCommands.GlobalCommands"][g]
                            [h]))] = g
            # NOTE(review): the exclusion list below is a single string that
            # looks like it was meant to be five separate entries; as
            # written it can never match a real gesture id -- confirm.
            elif ('kb:' in g and g not in [
                    "kb:alt', 'kb:control', 'kb:windows', 'kb:control', 'kb:applications"
            ] and 'br(' + curBD + '):' in str(
                    iniGestures["globalCommands.GlobalCommands"][g])):
                # Strip the display prefix so the id is display-agnostic.
                iniGestures[inputCore.normalizeGestureIdentifier(
                    str(iniGestures["globalCommands.GlobalCommands"]
                        [g])).replace('br(' + curBD + '):', '')] = g
    return gesturesFileExists, iniGestures
def rewrite_entries(config, path, specpath, sec=None, sort=False):
    """Write reST documentation for every scalar entry of ``sec``.

    Args:
        config: root ConfigObj (used for quoting defaults, and as the
            default section to document).
        path: output file path; overwritten.
        specpath: path interpolated into the generated NOTE header.
        sec: section to document; defaults to ``config`` itself.
        sort: sort the section's scalar names in place when True.
    """
    if sec is None:
        sec = config
    if sort:
        sec.scalars.sort()
    # One Validator suffices; it was previously re-created per entry.
    v = Validator()
    # 'with' guarantees the handle is closed even on error, and the
    # variable no longer shadows the 'file' builtin.
    with open(path, 'w') as out:
        out.write(NOTE % specpath)
        for entry in sec.scalars:
            etype, eargs, ekwargs, default = v._parse_check(sec[entry])
            if default is not None:
                default = config._quote(default)
            if etype == 'gpg_key_hint':
                etype = 'string'
            description = '\n.. _%s:\n' % entry.replace('_', '-')
            description += '\n.. describe:: %s\n\n' % entry
            comments = [sec.inline_comments[entry]] + sec.comments[entry]
            for c in comments:
                if c:
                    # Strip the leading '#' from each config comment line.
                    description += ' ' * 4 + re.sub(r'^\s*#', '', c)
            description = description.rstrip(' ') + '\n'
            if etype == 'option':
                description += '\n :type: option, one of %s\n' % eargs
            else:
                if etype == 'force_list':
                    etype = 'string list'
                description += '\n :type: %s\n' % etype
            if default is not None:
                # Escape '*' so reST does not treat it as emphasis.
                default = default.replace('*', '\\*')
                if etype in ['string', 'string_list', 'gpg_key_hint'] and \
                        default != 'None':
                    description += ' :default: "%s"\n\n' % (default)
                else:
                    description += ' :default: %s\n\n' % (default)
            out.write(description)
def parse_config(file):
    """Read the bugzilla config file and return its [global] section.

    Prints the first validation problem and exits with status 1 when the
    file does not validate.
    """
    spec = '''
    [global]
    url = string(default='https://bugzilla.redhat.com/xmlrpc.cgi')
    username = string()
    password = string()
    '''.splitlines()
    cfg = ConfigObj(file, configspec=spec)
    outcome = cfg.validate(Validator(), preserve_errors=True)
    for section_list, key, error in flatten_errors(cfg, outcome):
        section_list.append(key)
        if not error:
            error = 'Missing value or section.'
        print(','.join(section_list), '=', error)
        sys.exit(1)
    return cfg['global']
def test_assemble_sim():
    """Validate the simulation config and spot-check the grids it yields."""
    homedir = os.path.dirname(os.path.abspath(__file__))
    datadir = os.path.join(homedir, '..', '..', 'data', 'simulation')
    cfgfile = os.path.abspath(os.path.join(datadir, 'simulation.conf'))
    specfile = os.path.abspath(os.path.join(
        homedir, '..', '..', '..', 'shakemap', 'data',
        'simulationspec.conf'))
    simfile = os.path.abspath(os.path.join(datadir, 'planet9.csv'))

    config = ConfigObj(cfgfile, configspec=specfile)
    vtor = Validator()
    assert config.validate(vtor)

    imtgrids = _get_grids(config, simfile)

    # The PGA grid must reproduce the known reference sum.
    pgasum = np.nansum(imtgrids['PGA'].getData())
    np.testing.assert_almost_equal(pgasum, -2195.342762128482)

    # And exactly the expected IMTs must be present.
    expected_imts = ['PGA', 'PGV', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']
    assert sorted(imtgrids.keys()) == sorted(expected_imts)
def validate(self):
    """Copy widget values into settings (obfuscating passwords), then
    validate them against the configspec.

    Returns:
        int: 1 on success; 0 on failure (after showing an error dialog).
    """
    for key in self.entrydict.keys():
        value = self.entrydict[key].get()
        # Password fields are stored obfuscated, everything else verbatim.
        if "Password" in key:
            self.settings[self.section][key] = \
                myutils.password_obfuscate(value)
        else:
            self.settings[self.section][key] = value
    errortext = ("Some of your input contains errors. "
                 "Detailed error output below.\n\n")
    val = Validator()
    valresult = self.settings.validate(val, preserve_errors=True)
    if valresult != True:
        # BUGFIX: dict.has_key() was removed in Python 3; use "in".
        if self.section in valresult:
            sectionval = valresult[self.section]
            for key in sectionval.keys():
                if sectionval[key] != True:
                    errortext += "Error in item \"" + str(key) + \
                                 "\": " + str(sectionval[key]) + "\n"
            tkMessageBox.showerror("Erroneous input. Please try again.",
                                   errortext)
        return 0
    else:
        return 1
def load_config():
    """Load the automgtic configuration into the module-level globals,
    preferring ``automgtic_local.ini`` over ``automgtic.ini``."""
    global config, app_config, mg_config
    configspec = ConfigObj('automgtic/config_spec.ini',
                           list_values=False, _inspec=True)
    config_path = ('automgtic_local.ini'
                   if os.path.exists('automgtic_local.ini')
                   else 'automgtic.ini')
    config = ConfigObj(config_path, configspec=configspec,
                       interpolation='ConfigParser')
    # TODO: Add validation error handling
    validation_result = config.validate(Validator(), preserve_errors=True)
    app_config = config['automgtic']
    mg_config = config['mediagoblin']
def __init__(self, pargs):
    """Initialise the queue daemon: maintenance timers, configuration,
    autorun commands, log directory, and a pid lockfile.

    Args:
        pargs: parsed command-line arguments; uses ``.attached`` and
            ``.break_lock``.
    """
    current_time = int(time.time())
    # All periodic-maintenance timers start "now".
    self.MEMORY_UPDATE_TIME = current_time
    self.ASSOCIATE_UPDATE_TIME = current_time
    self.DB_MAINTENANCE_TIME = current_time
    self.children = {}
    self.attached = pargs.attached
    self.install_path, self.data_path = get_config_paths()
    self.config = get_config(self.install_path)
    #
    # Get shake.conf for the autorun modules
    #
    config_file = os.path.join(self.install_path, 'config', 'shake.conf')
    spec_file = get_configspec('shake')
    shake_config = ConfigObj(config_file, configspec=spec_file)
    results = shake_config.validate(Validator())
    if not isinstance(results, bool) or not results:
        config_error(shake_config, results)
    # Tokenized command line for the autorun modules.
    self.shake_cmds = shlex.split(shake_config['autorun_modules'])
    #
    # Turn this process into a daemon
    #
    self.logpath = os.path.join(self.install_path, 'logs')
    if not os.path.isdir(self.logpath):
        os.makedirs(self.logpath)
    # Refuse to start while another instance holds the pid lock, unless
    # explicitly asked to break it.
    pidfile = os.path.join(self.logpath, 'queue.pid')
    self.filelock = lockfile.FileLock(pidfile)
    if self.filelock.is_locked():
        if pargs.break_lock:
            self.filelock.break_lock()
        else:
            logger = self.getLogger()
            logger.error("pid lock file '%s' exists, can't start "
                         "sm_queue; exiting..." % (pidfile))
            sys.exit(-1)
def test_extra(self):
    """Unknown entries must be detected by get_extra_values() and make
    ConfigManager raise ExtraValuesError with a descriptive message."""
    # test to __str__
    config_specification = configobj.ConfigObj(
        debug_logs_default_paths.schema, list_values=False, _inspec=True)
    config = configobj.ConfigObj(configspec=config_specification)
    config.merge({'__extra__': True})
    validator = Validator()
    result = config.validate(validator, preserve_errors=True)
    # An entry absent from the spec is reported as an "extra" value.
    self.assertNotEqual(configobj.get_extra_values(config), None)
    # extra section
    extra = {'__extra__': True}
    with self.assertRaisesRegex(ExtraValuesError,
                                "The following configuration sources"):
        ConfigManager(debug_logs_default_paths).get_config(extra)
    with self.assertRaisesRegex(ExtraValuesError, " 'extra' argument"):
        ConfigManager(debug_logs_default_paths).get_config(extra)
    with self.assertRaisesRegex(
            ExtraValuesError,
            "Extra entry in section 'top level'. Entry '__extra__' is a value"
    ):
        ConfigManager(debug_logs_default_paths).get_config(extra)
    # extra subsection, extra key
    extra = {
        'debug_logs': {
            '__extra__': True,
            '__extra__2': {
                'val': 'is_dict'
            }
        }
    }
    with self.assertRaisesRegex(ExtraValuesError,
                                "Entry '__extra__2' is a section"):
        ConfigManager(debug_logs_default_paths).get_config(extra)
def test_invalid_config(self):
    """Missing sections, wrong option values, and missing required values
    must all surface as InvalidConfigError."""
    # missing section
    config_specification = configobj.ConfigObj(
        debug_logs_default_paths.schema, list_values=False, _inspec=True)
    config_specification.merge({'__test__': {'enabled': 'boolean()'}})
    config = configobj.ConfigObj(configspec=config_specification)
    validator = Validator()
    result = config.validate(validator, preserve_errors=True)
    # Constructing the error must not raise, and validation must fail.
    InvalidConfigError([], config, result)
    self.assertNotEqual(result, True)
    # incorrect type
    extra = {
        'debug_logs': {
            'handlers': {
                '__test__': {
                    'class': 'NotAnOption',
                }
            }
        }
    }
    self.assertRaises(
        InvalidConfigError,
        lambda: ConfigManager(debug_logs_default_paths).get_config(extra))
    # missing value
    extra = {
        'debug_logs': {
            'loggers': {
                '__test__': {
                    'template': '{source}'
                }
            }
        }
    }
    self.assertRaises(
        InvalidConfigError,
        lambda: ConfigManager(debug_logs_default_paths).get_config(extra))
def test_any_checker(self):
    """The custom 'any' check must coerce strings to the narrowest
    matching type: bool, int, float, list, then plain string."""
    validator = Validator()
    validator.functions['any'] = any_checker
    # Boolean: True
    self.assertIsInstance(validator.check('any', 'True'), bool)
    self.assertEqual(validator.check('any', 'True'), True)
    self.assertEqual(validator.check('any', 'yes'), True)
    # Boolean: False
    self.assertIsInstance(validator.check('any', 'False'), bool)
    self.assertEqual(validator.check('any', 'False'), False)
    self.assertEqual(validator.check('any', 'no'), False)
    # integers
    self.assertIsInstance(validator.check('any', '2'), int)
    self.assertEqual(validator.check('any', '2'), 2)
    # float
    self.assertIsInstance(validator.check('any', '2.1'), float)
    self.assertEqual(validator.check('any', '2.1'), 2.1)
    # lists: comma-separated input recurses element-wise
    self.assertEqual(validator.check('any', ','), [])
    self.assertEqual(validator.check('any', '1,'), [1])
    self.assertEqual(validator.check('any', '1,2'), [1, 2])
    self.assertEqual(validator.check('any', '1,false'), [1, False])
    self.assertEqual(validator.check('any', '1,false, string'),
                     [1, False, 'string'])
    self.assertEqual(validator.check('any', '1,false, string, 2.1'),
                     [1, False, 'string', 2.1])
    # NaN != NaN, so a helper compares the list containing it.
    assert_value_equal(
        validator.check('any', '1,false, string, 2.1, nan'),
        [1, False, 'string', 2.1, float('nan')])
    # string: the fall-through case
    self.assertIsInstance(validator.check('any', 'string'), str)
    self.assertEqual(validator.check('any', 'string'), 'string')
def get_logging_config():
    """Extract logging configuration from logging.conf.

    See this URL for example of config.
    https://gist.github.com/st4lk/6287746

    See https://docs.python.org/3.5/library/logging.config.html

    Returns:
        dict: Dictionary suitable for use with logging.config.dictConfig().
    """
    install_path, data_path = get_config_paths()
    conf_file = os.path.join(install_path, 'config', 'logging.conf')
    spec_file = get_configspec(config='logging')
    log_config = ConfigObj(conf_file,
                           configspec=spec_file,
                           interpolation='template')
    val = Validator()
    results = log_config.validate(val)
    if not isinstance(results, bool) or not results:
        config_error(log_config, results)

    _clean_log_dict(log_config)

    # Here follows a bit of trickery...
    # To have a logger point to the root logger using the dictConfig()
    # method, you need to have the logger have a name equal to the empty
    # string ''. Our logging dictionary is originally specified using
    # ConfigObj, which does not allow for empty section headers. So, we
    # need to get all of the information from the logger we specify, copy
    # it into a logger dictionary with an empty key, and then delete the
    # original logger from the config dictionary. Whew.
    #
    # ROBUSTNESS: keys()[0] only works because ConfigObj's Section.keys()
    # returns a list; wrap in list() so this also works with a plain dict
    # (whose keys() view is not subscriptable on Python 3).
    log_name = list(log_config['loggers'].keys())[0]
    log_config['loggers'][''] = log_config['loggers'][log_name]
    del log_config['loggers'][log_name]
    return log_config
def test_recenter():
    """DM-registration loop: apply a known speckle pattern to the DM and
    repeatedly re-run registration on fresh camera frames.

    Note: runs forever (interactive hardware test); interrupt to stop.
    """
    configfilename = 'speckle_null_config.ini'
    hardwareconfigfile = 'speckle_instruments.ini'
    configspecfile = 'speckle_null_config.spec'
    # CLEANUP: the original loaded the config twice (first without a
    # configspec, then immediately overwrote it); load once, validated.
    config = ConfigObj(configfilename, configspec=configspecfile)
    val = Validator()
    check = config.validate(val)

    pharo = hardware.PHARO_COM('PHARO', configfile=hardwareconfigfile)
    p3k = hardware.P3K_COM('P3K_COM', configfile=hardwareconfigfile)
    # LOAD CURRENT FLATMAP
    print("\n\nBeginning DM REGISTRATION\n\n")
    time.sleep(2)
    print("Retrieving bgd, flat, badpix")
    bgds = flh.setup_bgd_dict(config)

    use_centoffs = config['NULLING']['cent_off']
    initial_flatmap = p3k.grab_current_flatmap()
    p3k.safesend2('hwfp dm=off')

    # Superpose two orthogonal speckle patterns of equal amplitude.
    DMamp = 33
    kvecr = 33
    additionmapx = DM.make_speckle_kxy(kvecr, 0, DMamp, 0)
    additionmapy = DM.make_speckle_kxy(0, kvecr, DMamp, 0)
    additionmap = additionmapx + additionmapy
    print("sending new flatmap to p3k")
    status = p3k.load_new_flatmap(initial_flatmap + additionmap)

    while True:
        image = pharo.take_src_return_imagedata(exptime=4)
        dm_reg_autorun(image, configfilename, configspecfile)
        time.sleep(2)