def get_default_config_str(cls, encoding=None):
    """Return the class's default configuration rendered as a single string.

    The per-class defaults are looked up in ``cls.__config_defaults`` and
    serialized through ConfigObj; lines are joined with a newline plus an
    indent so the result can be embedded in log output.
    """
    default_cfg = configobj.ConfigObj(cls.__config_defaults[cls],
                                      encoding=encoding)
    return '\n '.join(default_cfg.write())
config_path: Path to weewx.conf""" parser = OptionParser(usage=usage_string) (options, args) = parser.parse_args() if len(args) < 1: sys.stderr.write("Missing argument(s).\n") sys.stderr.write(parser.parse_args(["--help"])) exit() config_path = args[0] weewx.debug = 1 try : config_dict = configobj.ConfigObj(config_path, file_error=True) except IOError: print "Unable to open configuration file ", config_path exit() if 'S3upload' not in config_dict: print >>sys.stderr, "No [S3upload] section in the configuration file %s" % config_path exit(1) engine = None S3upload = uploadFiles(engine, config_dict) rec = {'extraTemp1': 1.0, 'outTemp' : 38.2, 'dateTime' : int(time.time())}
def config_with_defaults(config=None):
    """Merge supplied config, if provided, on top of builtin defaults."""
    merged = configobj.ConfigObj(constants.RENEWER_DEFAULTS)
    overlay = configobj.ConfigObj() if config is None else config
    merged.merge(overlay)
    return merged
if __name__ == "__main__": parser = argparse.ArgumentParser( description= 'Hummingbird mask tool. Creates mask from stack files in current directory and given configuration file.' ) parser.add_argument('config', type=str, help="Configuration file") parser.add_argument( '-l', '--link', type=str, help="Creates symbolic link to the H5 mask from given path") if (len(sys.argv) == 1): parser.print_help() args = parser.parse_args() C = configobj.ConfigObj(args.config) files = os.listdir(".") files = [f for f in files if len(f) > 3] files = [f for f in files if f[-3:] == ".h5"] files = get_valid_stacks(files) if len(files) == 0: sys.exit(0) s = get_dims(files[0]) mask = numpy.ones(shape=s, dtype="bool") if C["mean_max"].lower() != 'none': mask *= get_max_mask(files, "mean", float(C["mean_max"]))
# License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import configobj import sys import re from StringIO import StringIO try: (file, section, key) = sys.argv[1:] except ValueError: print "Usage: printconfigsetting.py <file> <section> <setting>" sys.exit(1) with open(file) as fh: content = re.sub('^\s*;', '#', fh.read(), flags=re.M) c = configobj.ConfigObj(StringIO(content)) try: s = c[section] except KeyError: print >> sys.stderr, "Section [%s] not found." % section sys.exit(1) try: print s[key] except KeyError: print >> sys.stderr, "Key %s not found." % key sys.exit(1)
def setup_database(config_filename):
    """Create the database access singletons from the given config file."""
    cfg = configobj.ConfigObj(config_filename)
    database.setup_database_from_dict(cfg['database'])
def runit(self, payload, file_pointer):
    """Run one integration test: publish MQTT messages described by the
    JSON test file and check the records produced by MQTTSubscribeDriver.

    :param payload: name of the message-callback payload type under test
        (written into the driver's ``message_callback.type`` option)
    :param file_pointer: open file containing the JSON test description
        (a 'config' section plus a list of 'testruns')

    NOTE(review): original source had its indentation collapsed; the
    per-testrun placement of the record-verification section below is
    reconstructed and should be confirmed against the upstream file.
    """
    test_data = json.load(file_pointer, object_hook=utils.byteify)
    config_dict = configobj.ConfigObj(test_data['config'])
    testruns = test_data['testruns']
    cdict = config_dict['MQTTSubscribeService']
    # Force the message_callback type to the payload under test,
    # creating the subsection if the test config omitted it.
    if not 'message_callback' in config_dict['MQTTSubscribeService']:
        config_dict['MQTTSubscribeService']['message_callback'] = {}
    config_dict['MQTTSubscribeService']['message_callback'][
        'type'] = payload
    driver = MQTTSubscribeDriver(**cdict)

    # First client: used only to publish test messages to the broker.
    host = 'localhost'
    port = 1883
    keepalive = 60
    userdata = {'topics': [], 'connected_flag': False}
    client = mqtt.Client(userdata=userdata)
    client.on_connect = utils.on_connect
    client.connect(host, port, keepalive)
    client.loop_start()
    # Poll once per second until on_connect flips the flag or we time out.
    max_connect_wait = 1  # ToDo - configure
    i = 1
    while not userdata['connected_flag']:
        if i > max_connect_wait:
            self.fail("Timed out waiting for connections.")
        time.sleep(1)
        i += 1

    # Second client: subscribes to the same topics so the test can tell
    # when the broker has actually delivered each published message.
    userdata2 = {
        'topics': cdict['topics'].sections,
        'connected_flag': False,
        'msg': False,
        'max_msg_wait': 1  # ToDo - configure
    }
    client2 = mqtt.Client(userdata=userdata2)
    client2.on_connect = utils.on_connect
    client2.on_message = utils.on_message
    client2.connect(host, port, keepalive)
    client2.loop_start()
    max_connect2_wait = 1  # ToDo - configure
    i = 1
    while not userdata2['connected_flag']:
        #print("waiting to connect")
        if i > max_connect2_wait:
            self.fail("Timed out waiting for connection 2.")
        time.sleep(1)
        i += 1

    max_waits = 10
    for testrun in testruns:
        # Publish every message of this testrun and wait for the driver's
        # queue to fill before reading any packets.
        for topics in testrun['messages']:
            for topic in topics:
                topic_info = topics[topic]
                msg_count = utils.send_msg(utils.send_mqtt_msg, payload,
                                           client.publish, topic,
                                           topic_info, userdata2, self)
                wait_count = utils.wait_on_queue(driver, msg_count,
                                                 max_waits, 1)
                # If queue not filled, fail now
                # otherwise will end up in 'infinite' loop in genLoopPackets
                if wait_count >= max_waits:
                    self.fail("Could not fill queue.")

        records = []
        gen = driver.genLoopPackets()
        # Find the expected-results entry matching this driver test and
        # payload; fail if the test data has none.
        results = testrun['results']
        result = {}
        found = False
        for result in results:
            if 'driver' in result['test']:
                if payload in result['payloads']:
                    found = True
                    break
        self.assertTrue(found, "No results for %s" % payload)
        i = 0
        # ToDo not great, but no way to know if more records
        # could possibly check the queues..
        # Todo this needs to be fixed for new debug code
        while i < len(result['records']):
            data = next(gen, None)
            records.append(data)
            i += 1
        utils.check(self, payload, records, result['records'])

    # Tear down driver and both MQTT clients.
    driver.closePort()
    client.disconnect()
    client2.disconnect()
import os import configobj from telegram.ext import ( Updater, CommandHandler, MessageHandler, Filters ) __author__ = 'Amir Hossein Abdullahi' # Read configs config_file = configobj.ConfigObj('.env') BOT_TOKEN = str(config_file['BOT_TOKEN']) FILES_DIR = str(config_file['FILES_DIR']).replace('\\', '/') try: os.mkdir(FILES_DIR) except: pass def start(update, context): """Send a message when the command /start sended.""" update.message.reply_text('Welcome to file to link bot') def help_command(update, context):
def test_existing_updated(self):
    """configure() must apply the given loglevel to an existing server cfg."""
    overrides = {'loglevel': 'warn'}
    util.write_file(self.server_cfg, STOCK_CONFIG)
    cc_mcollective.configure(config=overrides, server_cfg=self.server_cfg)
    on_disk = configobj.ConfigObj(self.server_cfg)
    self.assertEqual(overrides['loglevel'], on_disk['loglevel'])
# Stand-alone entry point for testing the FTP upload: reads a weewx
# configuration file and determines the directory to upload.
# NOTE: this is Python 2 code (print statements).
if __name__ == '__main__':
    import weewx
    import socket
    import configobj

    weewx.debug = 1
    # Log everything (up to DEBUG) to syslog under the 'wee_ftpupload' tag.
    syslog.openlog('wee_ftpupload', syslog.LOG_PID | syslog.LOG_CONS)
    syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_DEBUG))

    # First argument (config path) is mandatory; second (upload dir) optional.
    if len(sys.argv) < 2:
        print """Usage: ftpupload.py path-to-configuration-file [path-to-be-ftp'd]"""
        exit()
    try:
        config_dict = configobj.ConfigObj(sys.argv[1], file_error=True)
    except IOError:
        print "Unable to open configuration file ", sys.argv[1]
        raise
    if len(sys.argv) == 2:
        # No explicit directory given: derive it from WEEWX_ROOT + HTML_ROOT.
        try:
            ftp_dir = os.path.join(config_dict['WEEWX_ROOT'],
                                   config_dict['StdReport']['HTML_ROOT'])
        except KeyError:
            print "No HTML_ROOT in configuration dictionary."
            exit()
    else:
        ftp_dir = sys.argv[2]
    # Keep FTP/socket operations from hanging indefinitely.
    socket.setdefaulttimeout(10)
def new_lineage(cls, lineagename, cert, privkey, chain, renewalparams=None,
                config=None, cli_config=None):
    # pylint: disable=too-many-locals,too-many-arguments
    """Create a new certificate lineage.

    Attempts to create a certificate lineage -- enrolled for potential
    future renewal -- with the (suggested) lineage name lineagename,
    and the associated cert, privkey, and chain (the associated
    fullchain will be created automatically). Optional configurator and
    renewalparams record the configuration that was originally used to
    obtain this cert, so that it can be reused later during automated
    renewal.

    Returns a new RenewableCert object referring to the created lineage.
    (The actual lineage name, as well as all the relevant file paths,
    will be available within this object.)

    :param str lineagename: the suggested name for this lineage
        (normally the current cert's first subject DNS name)
    :param str cert: the initial certificate version in PEM format
    :param str privkey: the private key in PEM format
    :param str chain: the certificate chain in PEM format
    :param configobj.ConfigObj renewalparams: parameters that
        should be used when instantiating authenticator and installer
        objects in the future to attempt to renew this cert or deploy
        new versions of it
    :param configobj.ConfigObj config: renewal configuration
        defaults, affecting, for example, the locations of the
        directories where the associated files will be saved
    :returns: the newly-created RenewalCert object
    :rtype: :class:`storage.renewableCert`"""
    config = config_with_defaults(config)
    # This attempts to read the renewer config file and augment or replace
    # the renewer defaults with any options contained in that file. If
    # renewer_config_file is undefined or if the file is nonexistent or
    # empty, this .merge() will have no effect.
    config.merge(configobj.ConfigObj(cli_config.renewer_config_file))

    # Examine the configuration and find the new lineage's name
    # Make sure the renewal, archive, and live directories all exist.
    # (NOTE: 0700 octal literal -- this is Python 2 syntax.)
    for i in (cli_config.renewal_configs_dir, cli_config.archive_dir,
              cli_config.live_dir):
        if not os.path.exists(i):
            os.makedirs(i, 0700)
    config_file, config_filename = le_util.unique_lineage_name(
        cli_config.renewal_configs_dir, lineagename)
    if not config_filename.endswith(".conf"):
        raise errors.CertStorageError(
            "renewal config file name must end in .conf")

    # Determine where on disk everything will go
    # lineagename will now potentially be modified based on which
    # renewal configuration file could actually be created
    lineagename = os.path.basename(config_filename)[:-len(".conf")]
    archive = os.path.join(cli_config.archive_dir, lineagename)
    live_dir = os.path.join(cli_config.live_dir, lineagename)
    if os.path.exists(archive):
        raise errors.CertStorageError("archive directory exists for " +
                                      lineagename)
    if os.path.exists(live_dir):
        raise errors.CertStorageError("live directory exists for " +
                                      lineagename)
    os.mkdir(archive)
    os.mkdir(live_dir)
    relative_archive = os.path.join("..", "..", "archive", lineagename)

    # Put the data into the appropriate files on disk
    # The live/ names are symlinks into archive/; writing through the
    # symlink below therefore places the data in the archive directory.
    target = dict([(kind, os.path.join(live_dir, kind + ".pem"))
                   for kind in ALL_FOUR])
    for kind in ALL_FOUR:
        os.symlink(os.path.join(relative_archive, kind + "1.pem"),
                   target[kind])
    with open(target["cert"], "w") as f:
        f.write(cert)
    with open(target["privkey"], "w") as f:
        f.write(privkey)
        # XXX: Let's make sure to get the file permissions right here
    with open(target["chain"], "w") as f:
        f.write(chain)
    with open(target["fullchain"], "w") as f:
        # assumes that OpenSSL.crypto.dump_certificate includes
        # ending newline character
        f.write(cert + chain)

    # Document what we've done in a new renewal config file
    config_file.close()
    new_config = configobj.ConfigObj(config_filename, create_empty=True)
    for kind in ALL_FOUR:
        new_config[kind] = target[kind]
    if renewalparams:
        new_config["renewalparams"] = renewalparams
        new_config.comments["renewalparams"] = ["",
                                                "Options and defaults used"
                                                " in the renewal process"]
    # TODO: add human-readable comments explaining other available
    # parameters
    new_config.write()
    return cls(new_config, config, cli_config)
def get_config_str(self):
    """Return this instance's configuration rendered as a single string."""
    rendered_lines = configobj.ConfigObj(self._config).write()
    return '\n '.join(rendered_lines)
def get_config(self, copy=True):
    """Return the instance's configuration.

    By default a fresh ConfigObj copy is returned so callers cannot
    mutate the instance state; pass ``copy=False`` to get the live object.
    """
    if not copy:
        return self._config
    return configobj.ConfigObj(self._config)
def load_config(self, config=None, nested=None, apply=True, cfgpatch=None,
                encoding=None, **kwargs):
    '''Load / update the instance configuration

    :Params:

        - **config**: A configuration file (str) or object (ConfigObj) or
          None to load defaults, or an :class:`~argparse.ArgumentParser`
          object.
        - **nested**: Load from a nested config section instead of
          the whole config.
          If True, use the section name returned by
          :meth:`get_config_section_name()`
          Else if a string, use the section name defined by the
          **nested** string
        - **cfgpatch**: A manual patch to apply to the config once loaded.
        - Other options are passed to
          :meth:`~vacumm.misc.config.ConfigManager.arg_parse`.

    :Return: A :class:`ConfigObj` object or :class:`ConfigObj`,options
        tuple if an :class:`~argparse.ArgumentParser` object has been
        passed
    '''
    # NOTE: Python 2 code (uses `basestring`).
    mgr = self.get_config_manager(encoding=encoding)
    sec = self.get_config_section_name()
    # Lazily initialize the stored config with defaults on first call.
    if not hasattr(self, '_config'):
        self._config = self.get_default_config(encoding=encoding)
        self._options = None
    if config is not None:
        # A string `nested` overrides the default section name.
        if isinstance(nested, basestring):
            sec = nested
        if isinstance(config, ArgumentParser):
            # Command-line driven load: also captures parsed options.
            self._config, self._options = mgr.arg_parse(
                config, nested=nested and sec or nested, getargs=True,
                **kwargs)
        else:
            cfg = configobj.ConfigObj(config, interpolation=False,
                                      encoding=encoding)
            # If a nested section lookup is required
            # Otherwise, the whole passed config will be taken
            if nested and sec and sec in cfg:
                # If not found, self._config remain unchanged
                cfg = configobj.ConfigObj(cfg[sec], interpolation=False,
                                          encoding=encoding)
            self._config = mgr.load(cfg)
    # Apply any manual patch(es) on top of the loaded config.
    if cfgpatch is not None:
        if not isinstance(cfgpatch, list):
            cfgpatch = [cfgpatch]
        for patch in cfgpatch:
            mgr.cfg_patch(self._config, patch)
    if apply:
        self.apply_config(self._config)
    if self._cfg_debug:
        self.debug('Loaded %s configuration:'
                   '\n section: %s'
                   '\n nested: %s'
                   '\n from: %s'
                   '\n loaded: '
                   '\n %s',
                   self.__class__, sec, nested, config,
                   '\n '.join(self._config.write()))
    return self._config
[[wind]] accumulator = vector extractor = wind [[windSpeed]] adder = add_wind merger = avg extractor = noop [[windDir]] extractor = noop [[windGust]] extractor = noop [[windGustDir]] extractor = noop """ import StringIO defaults = configobj.ConfigObj(StringIO.StringIO(defaults_ini)) del StringIO accum_type_dict = None add_dict = None merge_dict = None extract_dict = None def initialize(config_dict): """Must be called before using any of the accumulators""" global defaults, accum_type_dict, merge_dict, add_dict, extract_dict accum_type_dict = {} add_dict = {}
def generate_solver_file():
    """Write the SHARPy .sharpy case file for this simulation.

    Builds a nested ``settings`` dictionary (one section per SHARPy
    solver/postprocessor) from the module-level case parameters and
    serializes it with ConfigObj to ``cases_folder + case_name + '.sharpy'``.
    """
    file_name = cases_folder + case_name + '.sharpy'
    settings = dict()
    settings['SHARPy'] = {'case': case_name,
                          'route': cases_folder,
                          'flow': flow,
                          'write_screen': 'on',
                          'write_log': 'on',
                          'log_folder': route + '/output/' + case_name,
                          'log_file': case_name + '.log'}

    settings['BeamLoader'] = {'unsteady': 'on',
                              'orientation': algebra.euler2quat(np.array([roll,
                                                                          alpha,
                                                                          beta]))}
    settings['AerogridLoader'] = {'unsteady': 'on',
                                  'aligned_grid': 'on',
                                  'mstar': m_star_factor * m,
                                  'freestream_dir': ['1', '0', '0']}

    # --- Static solvers ---
    settings['NonLinearStatic'] = {'print_info': 'off',
                                   'max_iterations': 150,
                                   'num_load_steps': 1,
                                   'delta_curved': 1e-1,
                                   'min_delta': tolerance,
                                   'gravity_on': gravity,
                                   'gravity': 9.81}

    settings['StaticUvlm'] = {'print_info': 'on',
                              'horseshoe': 'off',
                              'num_cores': num_cores,
                              'n_rollup': 0,
                              'rollup_dt': dt,
                              'rollup_aic_refresh': 1,
                              'vortex_radius': 1e-8,
                              'rollup_tolerance': 1e-4,
                              'velocity_field_generator': 'SteadyVelocityField',
                              'velocity_field_input': {'u_inf': u_inf,
                                                       'u_inf_direction': [1., 0, 0]},
                              'rho': rho}

    settings['StaticCoupled'] = {'print_info': 'off',
                                 'structural_solver': 'NonLinearStatic',
                                 'structural_solver_settings': settings['NonLinearStatic'],
                                 'aero_solver': 'StaticUvlm',
                                 'aero_solver_settings': settings['StaticUvlm'],
                                 'max_iter': 100,
                                 'n_load_steps': n_step,
                                 'tolerance': fsi_tolerance,
                                 'relaxation_factor': structural_relaxation_factor}

    settings['StaticTrim'] = {'solver': 'StaticCoupled',
                              'solver_settings': settings['StaticCoupled'],
                              'initial_alpha': alpha,
                              'initial_deflection': cs_deflection,
                              'initial_thrust': thrust}

    # --- Dynamic structural solvers (free-flight vs prescribed motion) ---
    settings['NonLinearDynamicCoupledStep'] = {'print_info': 'off',
                                               'max_iterations': 950,
                                               'delta_curved': 1e-1,
                                               'min_delta': tolerance,
                                               'newmark_damp': 5e-3,
                                               'gravity_on': gravity,
                                               'gravity': 9.81,
                                               'num_steps': n_tstep,
                                               'dt': dt,
                                               'initial_velocity': u_inf}

    settings['NonLinearDynamicPrescribedStep'] = {'print_info': 'off',
                                                  'max_iterations': 950,
                                                  'delta_curved': 1e-1,
                                                  'min_delta': tolerance,
                                                  'newmark_damp': 5e-3,
                                                  'gravity_on': gravity,
                                                  'gravity': 9.81,
                                                  'num_steps': n_tstep,
                                                  'dt': dt,
                                                  'initial_velocity': u_inf*int(free_flight)}

    # Gust is felt as relative motion when the aircraft is clamped.
    relative_motion = 'off'
    if not free_flight:
        relative_motion = 'on'
    settings['StepUvlm'] = {'print_info': 'off',
                            'horseshoe': 'off',
                            'num_cores': num_cores,
                            'n_rollup': 0,
                            'convection_scheme': 2,
                            'rollup_dt': dt,
                            'rollup_aic_refresh': 1,
                            'rollup_tolerance': 1e-4,
                            'gamma_dot_filtering': 6,
                            'vortex_radius': 1e-8,
                            'velocity_field_generator': 'GustVelocityField',
                            'velocity_field_input': {'u_inf': int(not free_flight)*u_inf,
                                                     'u_inf_direction': [1., 0, 0],
                                                     'gust_shape': '1-cos',
                                                     'gust_length': gust_length,
                                                     'gust_intensity': gust_intensity*u_inf,
                                                     'offset': gust_offset,
                                                     'span': span_main,
                                                     'relative_motion': relative_motion},
                            'rho': rho,
                            'n_time_steps': n_tstep,
                            'dt': dt}

    # Pick the structural stepper matching the free-flight flag.
    if free_flight:
        solver = 'NonLinearDynamicCoupledStep'
    else:
        solver = 'NonLinearDynamicPrescribedStep'

    settings['DynamicCoupled'] = {'structural_solver': solver,
                                  'structural_solver_settings': settings[solver],
                                  'aero_solver': 'StepUvlm',
                                  'aero_solver_settings': settings['StepUvlm'],
                                  'fsi_substeps': 200,
                                  'fsi_tolerance': fsi_tolerance,
                                  'relaxation_factor': relaxation_factor,
                                  'minimum_steps': 1,
                                  'relaxation_steps': 150,
                                  'final_relaxation_factor': 0.5,
                                  'n_time_steps': 1,
                                  'dt': dt,
                                  'include_unsteady_force_contribution': 'off',
                                  'postprocessors': ['BeamLoads', 'BeamPlot', 'AerogridPlot'],
                                  'postprocessors_settings': {'BeamLoads': {'folder': route + '/output/',
                                                                            'csv_output': 'off'},
                                                              'BeamPlot': {'folder': route + '/output/',
                                                                           'include_rbm': 'on',
                                                                           'include_applied_forces': 'on'},
                                                              'AerogridPlot': {
                                                                  'folder': route + '/output/',
                                                                  'include_rbm': 'on',
                                                                  'include_applied_forces': 'on',
                                                                  'minus_m_star': 0},
                                                              }}

    # --- Postprocessors ---
    settings['BeamLoads'] = {'folder': route + '/output/',
                             'csv_output': 'off'}

    settings['BeamPlot'] = {'folder': route + '/output/',
                            'include_rbm': 'on',
                            'include_applied_forces': 'on',
                            'include_forward_motion': 'on'}

    settings['AeroForcesCalculator'] = {'folder': route + '/output/',
                                        'write_text_file': 'on',
                                        'screen_output': 'on'}

    settings['AerogridPlot'] = {'folder': route + '/output/',
                                'include_rbm': 'on',
                                'include_forward_motion': 'off',
                                'include_applied_forces': 'on',
                                'minus_m_star': 0,
                                'u_inf': u_inf,
                                'dt': dt}

    # --- Linear analysis ---
    settings['Modal'] = {'print_info': 'on',
                         'use_undamped_modes': 'on',
                         'NumLambda': num_modes,
                         'rigid_body_modes': free_flight,
                         'write_modes_vtk': 'on',
                         'print_matrices': 'off',
                         'write_data': 'on',
                         'rigid_modes_cg': 'off'}

    settings['LinearAssembler'] = {'linear_system': 'LinearAeroelastic',
                                   'linear_system_settings': {
                                       'track_body': 'off',
                                       'beam_settings': {'modal_projection': 'on',
                                                         'inout_coords': 'modes',
                                                         'discrete_time': 'on',
                                                         'newmark_damp': 5e-4,
                                                         'discr_method': 'newmark',
                                                         'dt': dt,
                                                         'proj_modes': 'undamped',
                                                         'use_euler': 'on',
                                                         'num_modes': num_modes,
                                                         'print_info': 'on',
                                                         'gravity': 'on',
                                                         'remove_dofs': []},
                                       'aero_settings': {'dt': dt,
                                                         'ScalingDict': {'density': rho,
                                                                         'length': chord_main * 0.5,
                                                                         'speed': u_inf},
                                                         'integr_order': 2,
                                                         'density': rho,
                                                         'remove_predictor': 'off',
                                                         'use_sparse': 'on',
                                                         'vortex_radius': 1e-8,
                                                         'remove_inputs': ['u_gust']},
                                       'rigid_body_motion': free_flight,
                                       'use_euler': 'on',
                                       }
                                   }

    # Optional Krylov reduced-order model of the linear UVLM.
    if rom:
        settings['LinearAssembler']['linear_system_settings']['aero_settings']['rom_method'] = ['Krylov']
        settings['LinearAssembler']['linear_system_settings']['aero_settings']['rom_method_settings'] = {
            'Krylov': {'algorithm': 'mimo_rational_arnoldi',
                       'frequency': [0.],
                       'r': 4,
                       'single_side': 'observability'}}

    settings['AsymptoticStability'] = {'sys_id': 'LinearAeroelastic',
                                       'print_info': 'on',
                                       'modes_to_plot': [],
                                       'display_root_locus': 'off',
                                       'frequency_cutoff': 0,
                                       'export_eigenvalues': 'off',
                                       'num_evals': 40,
                                       'folder': route + '/output/'}

    settings['FrequencyResponse'] = {'folder': route + '/output/',
                                     'target_system': ['aerodynamic'],
                                     'frequency_spacing': 'linear',
                                     'frequency_bounds': [0, 1],
                                     'num_freqs': 1}

    settings['SaveData'] = {'folder': route + '/output/' + case_name + '/',
                            'save_aero': 'off',
                            'save_struct': 'off',
                            'save_linear': 'on',
                            'save_linear_uvlm': 'on',
                            'format': 'mat'
                            }

    settings['LinDynamicSim'] = {'folder': route + '/output/',
                                 'n_tsteps': 10,
                                 'dt': dt,
                                 'postprocessors': ['AerogridPlot'],
                                 'postprocessors_settings': {'AerogridPlot': {'folder': route + '/output/',
                                                                              'include_rbm': 'on',
                                                                              'include_applied_forces': 'on',
                                                                              'minus_m_star': 0},
                                                             }
                                 }

    # Serialize all sections to the .sharpy file.
    import configobj
    config = configobj.ConfigObj()
    config.filename = file_name
    for k, v in settings.items():
        config[k] = v
    config.write()
def full_config(filename):
    """Initialize the database and logging singletons from one config file."""
    cfg = configobj.ConfigObj(filename)
    database.setup_database_from_dict(cfg['database'])
    logger.setup_logging_from_dict(cfg['logging'])
def _delete_if_appropriate(config):  # pylint: disable=too-many-locals,too-many-branches
    """Does the user want to delete their now-revoked certs?
    If run in non-interactive mode, deleting happens automatically,
    unless if both `--cert-name` and `--cert-path` were specified with
    conflicting values.

    :param config: parsed command line arguments
    :type config: interfaces.IConfig

    :returns: `None`
    :rtype: None

    :raises errors.Error: If anything goes wrong, including bad user input,
        if an overlapping archive dir is found for the specified lineage,
        etc ...
    """
    display = zope.component.getUtility(interfaces.IDisplay)
    reporter_util = zope.component.getUtility(interfaces.IReporter)

    attempt_deletion = config.delete_after_revoke
    # No explicit flag on the command line: ask interactively.
    if attempt_deletion is None:
        msg = ("Would you like to delete the cert(s) you just revoked?")
        attempt_deletion = display.yesno(msg, yes_label="Yes (recommended)",
                                         no_label="No",
                                         force_interactive=True, default=True)

    if not attempt_deletion:
        reporter_util.add_message("Not deleting revoked certs.",
                                  reporter_util.LOW_PRIORITY)
        return

    if not (config.certname or config.cert_path):
        raise errors.Error(
            'At least one of --cert-path or --cert-name must be specified.')

    if config.certname and config.cert_path:
        # first, check if certname and cert_path imply the same certs
        implied_cert_name = cert_manager.cert_path_to_lineage(config)

        if implied_cert_name != config.certname:
            # The two flags disagree: show the user both lineages and ask
            # which one was intended.
            cert_path_implied_cert_name = cert_manager.cert_path_to_lineage(
                config)
            cert_path_implied_conf = storage.renewal_file_for_certname(
                config, cert_path_implied_cert_name)
            cert_path_cert = storage.RenewableCert(cert_path_implied_conf,
                                                   config)
            cert_path_info = cert_manager.human_readable_cert_info(
                config, cert_path_cert, skip_filter_checks=True)

            cert_name_implied_conf = storage.renewal_file_for_certname(
                config, config.certname)
            cert_name_cert = storage.RenewableCert(cert_name_implied_conf,
                                                   config)
            cert_name_info = cert_manager.human_readable_cert_info(
                config, cert_name_cert)

            msg = ("You specified conflicting values for --cert-path and "
                   "--cert-name. " "Which did you mean to select?")
            choices = [cert_path_info, cert_name_info]
            try:
                code, index = display.menu(msg, choices, ok_label="Select",
                                           force_interactive=True)
            except errors.MissingCommandlineFlag:
                error_msg = ('To run in non-interactive mode, you must either specify only one of '
                             '--cert-path or --cert-name, or both must point to the same certificate lineages.')
                raise errors.Error(error_msg)

            if code != display_util.OK or not index in range(0, len(choices)):
                raise errors.Error("User ended interaction.")

            # Normalize the flags to the chosen lineage.
            if index == 0:
                config.certname = cert_path_implied_cert_name
            else:
                config.cert_path = storage.cert_path_for_cert_name(
                    config, config.certname)

    elif config.cert_path:
        config.certname = cert_manager.cert_path_to_lineage(config)

    else:  # if only config.certname was specified
        config.cert_path = storage.cert_path_for_cert_name(
            config, config.certname)

    # don't delete if the archive_dir is used by some other lineage
    archive_dir = storage.full_archive_path(
        configobj.ConfigObj(
            storage.renewal_file_for_certname(config, config.certname)),
        config, config.certname)
    try:
        cert_manager.match_and_check_overlaps(config,
                                              [lambda x: archive_dir],
                                              lambda x: x.archive_dir,
                                              lambda x: x)
    except errors.OverlappingMatchFound:
        msg = ('Not deleting revoked certs due to overlapping archive dirs. More than '
               'one lineage is using {0}'.format(archive_dir))
        reporter_util.add_message(''.join(msg), reporter_util.MEDIUM_PRIORITY)
        return
    except Exception as e:
        # Any other failure while checking overlaps is surfaced with full
        # path context to aid debugging.
        msg = ('config.default_archive_dir: {0}, config.live_dir: {1}, archive_dir: {2},'
               'original exception: {3}')
        msg = msg.format(config.default_archive_dir, config.live_dir,
                         archive_dir, e)
        raise errors.Error(msg)

    cert_manager.delete(config)
Check for 8 or 9 digits phone :param str phone: phone """ if len(str(phone)) == 8: return '9' + str(phone) if len(str(phone)) == 9: return str(phone)[1:] return False app.debug = True if __name__ == '__main__': fileHandler = logging.FileHandler("{0}/{1}.log".format( './log', 'debuglog')) fileHandler.setFormatter(logFormatter) rootLogger.addHandler(fileHandler) config = configobj.ConfigObj('config.ini') users = configobj.ConfigObj('users.ini') crateconf = config['crate'] app.config['SQLALCHEMY_DATABASE_URI'] = '%s%s%s' % ( crateconf['protocol'], crateconf['host'], crateconf['port']) app.config['CONFIG'] = users db = SQLAlchemy(app) app.run() app.debug(True)
#!/usr/bin/env python import logging import sys import configobj import bqueryd config = configobj.ConfigObj('/etc/bqueryd.cfg') redis_url = config.get('redis_url', 'redis://127.0.0.1:6379/0') azure_conn_string = config.get('azure_conn_string', None) def main(argv=sys.argv): if '-vvv' in argv: loglevel = logging.DEBUG elif '-vv' in argv: loglevel = logging.INFO elif '-v' in argv: loglevel = logging.WARNING else: loglevel = logging.ERROR data_dir = bqueryd.DEFAULT_DATA_DIR for arg in argv: if arg.startswith('--data_dir='): data_dir = arg[11:] if 'controller' in argv: bqueryd.ControllerNode(redis_url=redis_url, loglevel=loglevel, azure_conn_string=azure_conn_string).go() elif 'worker' in argv:
def generate_solver_file(horseshoe=False):
    """Write the SHARPy .sharpy case file for this simulation.

    Builds one ConfigObj section per SHARPy solver/postprocessor from the
    module-level case parameters and writes it to
    ``route + '/' + case_name + '.sharpy'``.

    :param bool horseshoe: when True, use a single-panel (mstar=1) wake
        in AerogridLoader instead of the full 150-panel wake.
    """
    file_name = route + '/' + case_name + '.sharpy'
    # config = configparser.ConfigParser()
    import configobj
    config = configobj.ConfigObj()
    config.filename = file_name
    config['SHARPy'] = {'case': case_name,
                        'route': route,
                        'flow': ['BeamLoader', 'AerogridLoader', 'StaticCoupled', 'DynamicPrescribedCoupled',
                                 # 'PrescribedUvlm',
                                 'AerogridPlot',
                                 # 'NonLinearDynamic',
                                 'BeamPlot',
                                 # 'AeroForcesCalculator',
                                 'BeamCsvOutput'],
                        'write_screen': 'on',
                        'write_log': 'on',
                        'log_folder': route + '/output/',
                        'log_file': case_name + '.log'}
    config['BeamLoader'] = {'unsteady': 'on',
                            'orientation': algebra.euler2quat(np.array([0.0,
                                                                        alpha_rad,
                                                                        beta*np.pi/180]))}
    # Static FSI: nonlinear beam coupled with steady UVLM.
    config['StaticCoupled'] = {'print_info': 'on',
                               'structural_solver': 'NonLinearStatic',
                               'structural_solver_settings': {'print_info': 'off',
                                                              'max_iterations': 150,
                                                              'num_load_steps': 10,
                                                              'delta_curved': 1e-5,
                                                              'min_delta': 1e-5,
                                                              'gravity_on': 'off',
                                                              'gravity': 9.754,
                                                              'orientation': algebra.euler2quat(np.array([0.0,
                                                                                                          alpha_rad,
                                                                                                          beta*np.pi/180]))},
                               'aero_solver': 'StaticUvlm',
                               'aero_solver_settings': {'print_info': 'off',
                                                        'horseshoe': 'off',
                                                        'num_cores': 4,
                                                        'n_rollup': 0,
                                                        'rollup_dt': main_chord/m_main/u_inf,
                                                        'rollup_aic_refresh': 1,
                                                        'rollup_tolerance': 1e-4,
                                                        'velocity_field_generator': 'SteadyVelocityField',
                                                        'velocity_field_input': {'u_inf': u_inf,
                                                                                 'u_inf_direction': [1., 0, 0]},
                                                        'rho': rho,
                                                        'alpha': alpha_rad,
                                                        'beta': beta},
                               'max_iter': 80,
                               'n_load_steps': 3,
                               'tolerance': 1e-4,
                               'relaxation_factor': 0.0}
    config['NonLinearDynamic'] = {'print_info': 'off',
                                  'max_iterations': 150,
                                  'num_load_steps': 4,
                                  'delta_curved': 1e-5,
                                  'min_delta': 1e-5,
                                  'newmark_damp': 5e-4,
                                  'gravity_on': 'on',
                                  'gravity': 9.754,
                                  'num_steps': num_steps,
                                  'dt': dt,
                                  'prescribed_motion': 'on'}
    config['PrescribedUvlm'] = {'print_info': 'off',
                                'horseshoe': 'off',
                                'num_cores': 4,
                                'n_rollup': 100,
                                'convection_scheme': 3,
                                'rollup_dt': main_chord/m_main/u_inf,
                                'rollup_aic_refresh': 1,
                                'rollup_tolerance': 1e-4,
                                'velocity_field_generator': 'SteadyVelocityField',
                                'velocity_field_input': {'u_inf': u_inf,
                                                         'u_inf_direction': [1., 0, 0]},
                                'rho': rho,
                                'alpha': alpha_rad,
                                'beta': beta,
                                'n_time_steps': num_steps,
                                'dt': dt}
    # Dynamic FSI with prescribed (not free-flight) motion.
    config['DynamicPrescribedCoupled'] = {'print_info': 'on',
                                          'structural_solver': 'NonLinearDynamicPrescribedStep',
                                          'structural_solver_settings': {'print_info': 'off',
                                                                         'max_iterations': 150,
                                                                         'num_load_steps': 10,
                                                                         'delta_curved': 1e-5,
                                                                         'min_delta': 1e-5,
                                                                         'newmark_damp': 1e-3,
                                                                         'gravity_on': 'off',
                                                                         'gravity': 9.754,
                                                                         'num_steps': num_steps,
                                                                         'dt': dt},
                                          'aero_solver': 'StepUvlm',
                                          'aero_solver_settings': {'print_info': 'off',
                                                                   'horseshoe': 'off',
                                                                   'num_cores': 4,
                                                                   'n_rollup': 100,
                                                                   'convection_scheme': 3,
                                                                   'rollup_dt': main_chord/m_main/u_inf,
                                                                   'rollup_aic_refresh': 1,
                                                                   'rollup_tolerance': 1e-4,
                                                                   'velocity_field_generator': 'SteadyVelocityField',
                                                                   'velocity_field_input': {'u_inf': u_inf,
                                                                                            'u_inf_direction': [1., 0, 0]},
                                                                   'rho': rho,
                                                                   'alpha': alpha_rad,
                                                                   'beta': beta,
                                                                   'n_time_steps': num_steps,
                                                                   'dt': dt},
                                          'max_iter': 100,
                                          'tolerance': 1e-6,
                                          'relaxation_factor': 0.,
                                          'n_time_steps': num_steps,
                                          'dt': dt,
                                          'structural_substeps': 10}

    # Wake discretization: single horseshoe panel or full 150-panel wake.
    if horseshoe is True:
        config['AerogridLoader'] = {'unsteady': 'on',
                                    'aligned_grid': 'on',
                                    'mstar': 1,
                                    'freestream_dir': ['1', '0', '0']}
    else:
        config['AerogridLoader'] = {'unsteady': 'on',
                                    'aligned_grid': 'on',
                                    'mstar': 150,
                                    'freestream_dir': ['1', '0', '0']}
    # --- Postprocessors ---
    config['AerogridPlot'] = {'folder': route + '/output/',
                              'include_rbm': 'on',
                              'include_applied_forces': 'on',
                              'minus_m_star': 0
                              }
    config['AeroForcesCalculator'] = {'folder': route + '/output/forces',
                                      'write_text_file': 'on',
                                      'text_file_name': case_name + '_aeroforces.csv',
                                      'screen_output': 'on',
                                      'unsteady': 'off'
                                      }
    config['BeamPlot'] = {'folder': route + '/output/',
                          'include_rbm': 'on',
                          'include_applied_forces': 'on'}
    config['BeamCsvOutput'] = {'folder': route + '/output/',
                               'output_pos': 'on',
                               'output_psi': 'on',
                               'output_for_pos': 'on',
                               'screen_output': 'off'}
    config.write()
# Command-line driver: read the weewx configuration and run the polar
# plot generator once.
parser = optparse.OptionParser()
parser.add_option("--config", dest="config_path", metavar="CONFIG_FILE",
                  default="/home/weewx/weewx.conf",
                  help="Use configuration file CONFIG_FILE")
parser.add_option("--debug", action="store_true",
                  help="Enable verbose logging")
(options, args) = parser.parse_args()

debug = 0
if options.debug:
    weewx.debug = 2
    debug = 1

# Fall back to an empty dict if the config file cannot be read; note that
# cfg['Station'] below will then raise KeyError.
cfg = dict()
try:
    cfg = configobj.ConfigObj(options.config_path)
except IOError:
    pass

# NOTE(review): `stdrep` is not used in the visible code — confirm whether
# later (out-of-view) code reads it before removing.
stdrep = cfg.get('StdReport', {})

gen = PolarPlotGenerator(
    cfg,
    build_skin_dict(cfg, 'SeasonsReport'),
    gen_ts=None,
    first_run=None,
    stn_info=weewx.station.StationInfo(**cfg['Station']))
gen.run()
def main(devel, homol, prod, debug, swagger, instances):
    """Bootstrap and start the tornado provisioning server.

    Reads server.ini / config.ini (plus configProd.ini in prod), builds the
    URL table for every configured provisioner version / operator / vendor /
    operation type (including optional swagger documentation routes and SOAP
    WSDL clients), then binds and starts the HTTP server.

    :param devel: run in development mode (implies debug)
    :param homol: run in homologation (staging) mode
    :param prod: run in production mode (unused directly; prod is the fallback)
    :param debug: enable DEBUG-level logging
    :param swagger: if true, (re)generate and serve swagger documentation
    :param instances: number of tornado processes (defaults to 2 when falsy)
    """
    global slog, server, app
    # Register the shutdown signal handler.
    # signal.signal(signal.SIGTERM, lambda sig, frame: tornado.ioloop.IOLoop().instance().add_callback(tornado.ioloop.IOLoop().instance().stop))
    signal.signal(signal.SIGINT, sig_handler)
    # Server (tornado) settings.
    serverConfig = configobj.ConfigObj('server.ini')
    # Application settings.
    appConfig = configobj.ConfigObj('config.ini')
    # Real directory of this file.
    path, fileName = os.path.split(os.path.realpath(__file__))
    # Import the utils module for the configured version.
    # importlib only sees LOCAL variables here.
    utils = importlib.import_module('utils.%s' % (appConfig['versions']['utils']))
    # Session storage shared with the request handlers.
    sess = dict()
    # Default instance count for dev and homol.
    if not instances:
        instances = 2
    slog = utils.setup_logger('tornado_oauthlib',
                              logging.DEBUG if debug else logging.INFO,
                              True if devel else False)
    # Silence suds debug logging.
    logging.getLogger('suds').setLevel(logging.INFO)
    # Pick the environment level; devel forces debug on, prod layers
    # configProd.ini over the base application config.
    if devel:
        slog.info('DEVEL MODE')
        # Swagger urlBase.
        swaggerUrlBase = appConfig['swagger']['dev']
        debug = True
        level = 'devel'
        msgUpserver = 'Server Develop Starting'
    elif homol:
        slog.info('HOMOL MODE')
        # Swagger urlBase.
        swaggerUrlBase = appConfig['swagger']['homol']
        level = 'homol'
        msgUpserver = 'Server Homol Starting'
    else:
        slog.info('PROD MODE')
        # Swagger urlBase.
        swaggerUrlBase = appConfig['swagger']['prod']
        level = 'prod'
        # Production overrides for the application config.
        appConfigProd = configobj.ConfigObj('configProd.ini')
        appConfig.update(appConfigProd)
        msgUpserver = 'Server Production Starting'
    if debug:
        slog.info('DEBUG MODE ON')
    else:
        slog.info('INFO MODE ON')
    # Route table for the tornado application.
    URLS = []
    # tornado application settings.
    settings = dict()
    # Swagger documentation routes.
    if (swagger):
        slog.info('Loading Swagger...')
        # Regenerate the swagger JSON from the YAML sources.
        os.system('rm -rf documentacao/json-generated')
        swaggerDir = {
            'yaml_dir': os.path.join(path, 'documentacao', 'yaml'),
            'json_dir': os.path.join(path, 'documentacao', 'json-generated'),
            'browser_dir': os.path.join(path, 'documentacao', 'browser')
        }
        yaml_to_json(
            swaggerDir['yaml_dir'], swaggerDir['json_dir'], '',
            '%s:%s' % (swaggerUrlBase, serverConfig['tornado']['port']),
            appConfig['statusCode'])
        slog.info('Swagger Load.')
        # One static route per generated per-API subdirectory.
        for sDir in os.listdir(swaggerDir['json_dir']):
            if os.path.isdir(os.path.join(swaggerDir['json_dir'], sDir)):
                URLS += [(r'/apiDoc/%s(.*)' % (sDir),
                          tornado.web.StaticFileHandler,
                          dict(path='%s/%s/swagger.json' %
                               (swaggerDir['json_dir'], sDir)))]
        # Top-level swagger route and redirect to the browser UI.
        URLS += [(r'/apiDoc/(.*)', tornado.web.StaticFileHandler,
                  dict(path='%s/swagger.json' % (swaggerDir['json_dir']))),
                 (r'/', tornado.web.RedirectHandler,
                  dict(url='/documentacao/browser/index.html'))]
        # Static-file settings for the swagger browser UI.
        settings.update({
            'static_path': swaggerDir['browser_dir'],
            'static_url_prefix': '/documentacao/browser/'
        })
    # configobj only yields a list when the ini value has a trailing comma;
    # normalize a bare string to a one-element list.
    if type(appConfig['versions']['available']) is str:
        appConfig['versions']['available'] = [
            appConfig['versions']['available']
        ]
    # WSDL client objects, keyed by operator.
    wsdlClientes = dict()
    for provisionerVersion in appConfig['versions']['available']:
        # Iterate the operators from the config file.
        for iOperator in appConfig['operators']:
            # Same trailing-comma normalization as above: a bare string
            # becomes a one-element list.
            if type(appConfig['operators'][iOperator]['vendors']) is str:
                appConfig['operators'][iOperator]['vendors'] = [
                    appConfig['operators'][iOperator]['vendors']
                ]
            # Skip operators that do not support this provisioner version.
            if provisionerVersion not in appConfig['operators'][iOperator][
                    'vendors'].keys():
                continue
            # Iterate this operator's vendors for the version.
            for iVendor in appConfig['operators'][iOperator]['vendors'][
                    provisionerVersion].keys():
                # level in appConfig['operators'][iOperator]['vendors'][provisionerVersion][iVendor].keys()
                # Collapse the vendor entry to the endpoint set for this
                # level (homol/devel use the 'homol' endpoints when present,
                # otherwise 'prod').
                if (level == 'homol' or level == 'devel'
                        ) and 'homol' in appConfig['operators'][iOperator][
                            'vendors'][provisionerVersion][iVendor].keys():
                    appConfig['operators'][iOperator]['vendors'][provisionerVersion][iVendor] = \
                        appConfig['operators'][iOperator]['vendors'][provisionerVersion][iVendor]['homol']
                else:
                    appConfig['operators'][iOperator]['vendors'][provisionerVersion][iVendor] = \
                        appConfig['operators'][iOperator]['vendors'][provisionerVersion][iVendor]['prod']
                # Iterate the operation types for this vendor.
                subUrl = ""
                for iType in appConfig['vendors'][provisionerVersion][iVendor]:
                    # Load the WSDL when the vendor declares one.
                    if 'wsdl' in appConfig['operators'][iOperator]['vendors'][
                            provisionerVersion][iVendor].keys():
                        urlRestBase = '/%s/vendors/%s/%s/wsdl/' % (
                            path, provisionerVersion, iVendor)
                        urlWsdl = 'file:/%s%s' % (
                            urlRestBase,
                            appConfig['operators'][iOperator]['vendors']
                            [provisionerVersion][iVendor]['urlWsdl'])
                        # Create the SOAP client instance (with a client
                        # certificate when configured).
                        # NOTE(review): the "Content-TYpe" header key looks
                        # like a typo for "Content-Type" — HTTP headers are
                        # case-insensitive so it likely still works; confirm
                        # before changing.
                        if 'cert' in appConfig['operators'][iOperator][
                                'vendors'][provisionerVersion][iVendor].keys():
                            wsdlClientes[iOperator] = {
                                iVendor:
                                Client(
                                    urlWsdl,
                                    location=appConfig['operators'][iOperator]
                                    ['vendors'][provisionerVersion][iVendor]
                                    ['urlBase'],
                                    headers={
                                        "Content-TYpe":
                                        "text/xml;charset=UTF-8",
                                        "SOAPAction": ""
                                    },
                                    transport=RequestsTransport(
                                        cert='%s/certificado/%s' %
                                        (urlRestBase,
                                         appConfig['operators'][iOperator]
                                         ['vendors'][provisionerVersion]
                                         [iVendor]['cert'])),
                                    faults=False)
                            }
                        else:
                            wsdlClientes[iOperator] = {
                                iVendor: Client(urlWsdl, faults=False)
                            }
                    if iType not in subUrl:
                        # Build the main route for this operation type.
                        vendorsClass = getattr(
                            importlib.import_module(
                                'vendors.%s.%s' %
                                (provisionerVersion, iVendor)),
                            iVendor.title())
                        URLS.append(
                            (r'/%s/%s/%s/%s/([0-9A-Za-z@_\-.]+)?(/)?'
                             % (provisionerVersion, iVendor, iOperator, iType),
                             vendorsClass,
                             dict(provisionerVersion=provisionerVersion,
                                  vendor=iVendor,
                                  operator=iOperator,
                                  typeVendor=iType,
                                  subUrl=False,
                                  appConfig=appConfig,
                                  wsdlClientes=wsdlClientes,
                                  utils=utils,
                                  debug=debug,
                                  level=level,
                                  sess=sess)))
                        # Check whether this operation type declares sub-URLs.
                        if 'suburl' in appConfig['vendors'][
                                provisionerVersion][iVendor][iType].keys():
                            subUrl = appConfig['vendors'][provisionerVersion][
                                iVendor][iType]['suburl']['suburl']
                            if type(subUrl) is str:
                                subUrl = [subUrl]
                            # Register one route per sub-URL.
                            for sub in subUrl:
                                vendorsClass = getattr(
                                    importlib.import_module(
                                        'vendors.%s.%s' %
                                        (provisionerVersion, iVendor)),
                                    iVendor.title())
                                URLS.append((
                                    r'/%s/%s/%s/%s/([0-9A-Za-z@_\-.]+)?/%s/([0-9A-Za-z@_\-.]+)?'
                                    % (provisionerVersion, iVendor, iOperator,
                                       iType, sub),
                                    vendorsClass,
                                    dict(provisionerVersion=provisionerVersion,
                                         vendor=iVendor,
                                         operator=iOperator,
                                         typeVendor=sub,
                                         subUrl=True,
                                         appConfig=appConfig,
                                         wsdlClientes=wsdlClientes,
                                         utils=utils,
                                         debug=debug,
                                         level=level,
                                         sess=sess)))
                    # Clear so the next vendor without this type does not
                    # duplicate the URL.
                    iType = ""
    # NOTE(review): `instances` was defaulted to 2 above, so it is always
    # truthy here and the `if not instances` fallbacks below always pick
    # `instances` — confirm the ini value was meant to be reachable.
    slog.info(
        '%s - port:%s instances:%s' %
        (msgUpserver, serverConfig['tornado']['port'],
         serverConfig['tornado']['instances'] if not instances else instances))
    # Build the application with the collected routes and start serving.
    app = tornado.web.Application(URLS, **settings)
    server = tornado.httpserver.HTTPServer(app, xheaders=True)
    server.bind(int(serverConfig['tornado']['port']))
    server.start(
        int(serverConfig['tornado']['instances']
            ) if not instances else instances)
    tornado.ioloop.IOLoop().instance().start()
def get_config(config_path):
    """
    Retrieve the config object from config_path.

    Tries config_path first, then config_path + '.config' as a fallback,
    validates it against the 'configspec' file next to this module, fills
    in string defaults for empty settings, normalizes the BUFR model list,
    and converts Model parameter kwargs to numeric types.

    :param config_path: str: full path to config file (with or without the
        '.config' extension)
    :return: validated configobj.ConfigObj
    :raises IOError: if neither candidate file can be opened
    :raises configobj.ConfigObjError: if a candidate file cannot be parsed
    """
    import configobj
    from validate import Validator
    dir_path = os.path.dirname(os.path.realpath(__file__))
    config_spec = '%s/configspec' % dir_path

    # Try the path as given, then with the '.config' extension appended.
    # Unlike the original nested try blocks, a parse error in the fallback
    # file now also gets the friendly message before re-raising.
    config = None
    for candidate in (config_path, config_path + '.config'):
        try:
            config = configobj.ConfigObj(candidate, configspec=config_spec,
                                         file_error=True)
            break
        except IOError:
            continue
        except configobj.ConfigObjError as e:
            print('Error while parsing configuration file %s' % config_path)
            print("*** Reason: '%s'" % e)
            raise
    if config is None:
        print('Error: unable to open configuration file %s' % config_path)
        raise IOError('unable to open configuration file %s' % config_path)

    config.validate(Validator())

    # Fill string defaults for settings left empty in the file. Each entry
    # is (path-of-keys, default); the %(...)s forms are configobj
    # interpolation references resolved against the config itself.
    empty_defaults = (
        (('SITE_ROOT',), '%(MOSX_ROOT)s/site_data'),
        (('BUFR', 'bufr_station_id'), '%(station_id)s'),
        (('BUFR', 'bufr_data_dir'), '%(SITE_ROOT)s/bufkit'),
        (('BUFR', 'bufrgruven'), '%(BUFR_ROOT)s/bufr_gruven.pl'),
        (('Obs', 'sounding_data_dir'), '%(SITE_ROOT)s/soundings'),
    )
    for keys, default in empty_defaults:
        section = config
        for k in keys[:-1]:
            section = section[k]
        if section[keys[-1]] == '':
            section[keys[-1]] = default

    # Add in a list for BUFR models. GFS maps to a [bufkit-name, model-name]
    # pair; everything else is just lower-cased (mixed list/str entries are
    # intentional and consumed downstream).
    config['BUFR']['bufr_models'] = []
    for model in config['BUFR']['models']:
        if model.upper() == 'GFS':
            config['BUFR']['bufr_models'].append(['gfs3', 'gfs'])
        else:
            config['BUFR']['bufr_models'].append(model.lower())

    # Convert kwargs to int/float types. 'Parameters' is required; the
    # tuning sections are optional, so a missing one is simply skipped.
    config['Model']['Parameters'].walk(walk_kwargs)
    for optional_section in ('Ada boosting', 'Rain tuning', 'Bootstrapping'):
        try:
            config['Model'][optional_section].walk(walk_kwargs)
        except KeyError:
            pass
    return config
wind_height = 2.0 et_period = 3600 [[[heatindex]]] algorithm = new [[PressureCooker]] max_delta_12h = 1800 [[[altimeter]]] algorithm = aaASOS # Case-sensitive! [[RainRater]] rain_period = 900 retain_period = 930 [[Delta]] [[[rain]]] input = totalRain """ defaults_dict = configobj.ConfigObj(StringIO(DEFAULTS_INI), encoding='utf-8') first_time = True class WXXTypes(weewx.xtypes.XType): """Weather extensions to the WeeWX xtype system that are relatively simple. These types are generally stateless, such as dewpoint, heatindex, etc. """ def __init__(self, altitude_vt, latitude_f, longitude_f, et_period=3600, atc=0.8, nfac=2, wind_height=2.0,
import configobj

# Generate one .cfg file per (component, nside) combination for the
# Simons Observatory pysm simulations.
all_simulations = ["dust", "synchrotron", "ame", "freefree"]
model = "0"
for nside in [512, 4096]:
    # Maps above nside 512 carry the small-scale ("s") component suffix.
    small_scale = "s" if nside > 512 else ""
    for content in all_simulations:
        component_string = "SO_" + content[0] + model + small_scale
        config = configobj.ConfigObj()
        config["content"] = content
        config["pysm_components"] = {
            "pysm_components_string": component_string,
        }
        config.filename = "{}_{}.cfg".format(content, nside)
        config.write()
def delete_files(config, certname):
    """Delete all files related to the certificate.

    If some files are not found, ignore them and continue.

    Removes, in order: the renewal config file, the four cert files it
    points at (plus the live directory when it only holds the README),
    and finally the archive directory.

    :param config: configuration object supplying storage paths
        (presumably a certbot config — confirm against callers)
    :param str certname: name of the certificate lineage to delete
    :raises errors.CertStorageError: if the renewal file cannot be parsed
    """
    renewal_filename = renewal_file_for_certname(config, certname)
    # file exists
    full_default_archive_dir = full_archive_path(None, config, certname)
    full_default_live_dir = _full_live_path(config, certname)
    try:
        renewal_config = configobj.ConfigObj(renewal_filename)
    except configobj.ConfigObjError:
        # config is corrupted
        logger.warning(
            "Could not parse %s. You may wish to manually "
            "delete the contents of %s and %s.", renewal_filename,
            full_default_live_dir, full_default_archive_dir)
        raise errors.CertStorageError(
            "error parsing {0}".format(renewal_filename))
    finally:
        # we couldn't read it, but let's at least delete it
        # if this was going to fail, it already would have.
        # NOTE: this finally runs on BOTH the success path and the
        # corrupted-config path — the renewal file is removed either way.
        os.remove(renewal_filename)
        logger.debug("Removed %s", renewal_filename)
    # cert files and (hopefully) live directory
    # it's not guaranteed that the files are in our default storage
    # structure. so, first delete the cert files.
    directory_names = set()
    for kind in ALL_FOUR:
        link = renewal_config.get(kind)
        try:
            os.remove(link)
            logger.debug("Removed %s", link)
        except OSError:
            # best-effort: a missing file is fine, keep going
            logger.debug("Unable to delete %s", link)
        directory = os.path.dirname(link)
        directory_names.add(directory)
    # if all four were in the same directory, and the only thing left
    # is the README file (or nothing), delete that directory.
    # this will be wrong in very few but some cases.
    if len(directory_names) == 1:
        # delete the README file
        directory = directory_names.pop()
        readme_path = os.path.join(directory, README)
        try:
            os.remove(readme_path)
            logger.debug("Removed %s", readme_path)
        except OSError:
            logger.debug("Unable to delete %s", readme_path)
        # if it's now empty, delete the directory
        try:
            os.rmdir(directory)  # only removes empty directories
            logger.debug("Removed %s", directory)
        except OSError:
            logger.debug("Unable to remove %s; may not be empty.", directory)
    # archive directory
    # NOTE(review): if full_archive_path itself raised OSError here,
    # the except would hit a NameError on archive_path — confirm
    # full_archive_path cannot raise OSError before changing.
    try:
        archive_path = full_archive_path(renewal_config, config, certname)
        shutil.rmtree(archive_path)
        logger.debug("Removed %s", archive_path)
    except OSError:
        logger.debug("Unable to remove %s", archive_path)
dest="collector", default=None, help="Configure a single collector") parser.add_option("-p", "--print", action="store_true", dest="dump", default=False, help="Just print the defaults") # Parse Command Line Args (options, args) = parser.parse_args() # Initialize Config if os.path.exists(options.configfile): config = configobj.ConfigObj(os.path.abspath(options.configfile)) else: print >> sys.stderr, "ERROR: Config file: %s does not exist." % ( options.configfile) print >> sys.stderr, ("Please run python config.py -c " + "/path/to/diamond.conf") parser.print_help(sys.stderr) sys.exit(1) collector_path = config['server']['collectors_path'] docs_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'docs')) handler_path = os.path.abspath( os.path.join(os.path.dirname(__file__), 'src', 'diamond', 'handler')) getIncludePaths(collector_path)
def load(self, filename):
    """Load user options from the configobj ini file at *filename*.

    Parses and validates the file against the module-level configspec,
    then copies each recognized section into the corresponding attributes
    on this options object. A parse error falls back to an empty (default)
    config; a missing file only writes the initial encoding comment and
    returns early. Invalid values are reset to None and reported via
    print_err; per-item conversion errors are printed and skipped.
    """
    # create ConfigObj instance
    try:
        config = configobj.ConfigObj(filename,
                                     configspec=configspec,
                                     encoding=self._config_encoding)
    except configobj.ParseError:
        # fall back to an empty config built from the spec defaults
        traceback.print_exc()
        config = configobj.ConfigObj(configspec=configspec,
                                     encoding=self._config_encoding)
    self._config = config
    # create sections
    for section in (
            'general',
            'sound_samples',
            'fonts',
            'colors',
            'timeouts',
            'cardsets',
            'games_geometry',
            'offsets',
            ):
        if section not in config:
            config[section] = {}
    # add initial comment
    if not os.path.exists(filename):
        config.initial_comment = [
            '-*- coding: %s -*-' % self._config_encoding
        ]
        return
    # validation: reset any value that fails the configspec to None
    vdt = validate.Validator()
    res = config.validate(vdt)
    # from pprint import pprint; pprint(res)
    if isinstance(res, dict):
        for section, data in res.items():
            if data is True:
                continue
            for key, value in data.items():
                if value is False:
                    print_err('config file: validation error: '
                              'section: "%s", key: "%s"' % (section, key))
                    config[section][key] = None
    # general: copy each known option onto self; the literal string
    # 'None' is treated as the None value
    for key, t in self.GENERAL_OPTIONS:
        val = self._getOption('general', key, t)
        if val == 'None':
            setattr(self, key, None)
        elif val is not None:
            setattr(self, key, val)
    pysollib.settings.TRANSLATE_GAME_NAMES = self.translate_game_names
    recent_gameid = self._getOption('general', 'recent_gameid', 'list')
    if recent_gameid is not None:
        try:
            self.recent_gameid = [int(i) for i in recent_gameid]
        except Exception:
            traceback.print_exc()
    favorite_gameid = self._getOption('general', 'favorite_gameid', 'list')
    if favorite_gameid is not None:
        try:
            self.favorite_gameid = [int(i) for i in favorite_gameid]
        except Exception:
            traceback.print_exc()
    visible_buttons = self._getOption('general', 'visible_buttons', 'list')
    if visible_buttons is not None:
        for key in TOOLBAR_BUTTONS:
            self.toolbar_vars[key] = (key in visible_buttons)
    myGettext.language = self.language
    # solver: make sure the 'none' preset is always first
    solver_presets = self._getOption('general', 'solver_presets', 'list')
    if solver_presets is not None:
        if 'none' not in solver_presets:
            solver_presets.insert(0, 'none')
        self.solver_presets = solver_presets
    # sound_samples
    for key in self.sound_samples:
        val = self._getOption('sound_samples', key, 'bool')
        if val is not None:
            self.sound_samples[key] = val
    # fonts: stored as lists; element 1 is the point size, and a
    # successfully converted entry becomes a tuple
    for key in self.fonts:
        if key == 'default':
            continue
        val = self._getOption('fonts', key, 'str')
        if val is not None:
            try:
                val[1] = int(val[1])
            except Exception:
                traceback.print_exc()
            else:
                val = tuple(val)
            self.fonts[key] = val
    # colors
    for key in self.colors:
        val = self._getOption('colors', key, 'str')
        if val is not None:
            self.colors[key] = val
    # timeouts
    for key in self.timeouts:
        val = self._getOption('timeouts', key, 'float')
        if val is not None:
            self.timeouts[key] = val
    # cardsets: keys are stored as strings in the file, ints on self
    for key in self.cardset:
        val = self._getOption('cardsets', str(key), 'list')
        if val is not None:
            try:
                self.cardset[int(key)] = val
            except Exception:
                traceback.print_exc()
    for key, t in (('scale_cards', 'bool'),
                   ('scale_x', 'float'),
                   ('scale_y', 'float'),
                   ('auto_scale', 'bool'),
                   ('spread_stacks', 'bool'),
                   ('preserve_aspect_ratio', 'bool')):
        val = self._getOption('cardsets', key, t)
        if val is not None:
            setattr(self, key, val)
    # games_geometry: each value must be an int pair (width, height)
    for key, val in config['games_geometry'].items():
        try:
            val = [int(i) for i in val]
            assert len(val) == 2
            self.games_geometry[int(key)] = val
        except Exception:
            traceback.print_exc()
    game_geometry = self._getOption('general', 'game_geometry', 'list')
    if game_geometry is not None:
        try:
            self.game_geometry = tuple(int(i) for i in game_geometry)
        except Exception:
            traceback.print_exc()
    # cards offsets: each value must be an int pair (x, y)
    for key, val in config['offsets'].items():
        try:
            val = [int(i) for i in val]
            assert len(val) == 2
            self.offsets[key] = val
        except Exception:
            traceback.print_exc()
    # mouse buttons swap: clamp configured buttons to at least 1
    def _positive(button):
        return max([button, 1])
    _global_settings['mouse_button1'] = _positive(self.mouse_button1)
    _global_settings['mouse_button2'] = _positive(self.mouse_button2)
    _global_settings['mouse_button3'] = _positive(self.mouse_button3)
def get_default_config(cls, encoding=None):
    """Return a fresh ConfigObj copy of this class's default configuration.

    :param encoding: optional encoding passed through to ConfigObj
    """
    defaults = cls.__config_defaults[cls]
    return configobj.ConfigObj(defaults, encoding=encoding)