def load_into_settings(filename, settings):
    """Load config file contents into a Pyramid settings dict.

    This is a helper function for initialising a Pyramid settings dict
    from a config file.  It flattens the config file sections into dotted
    settings names and updates the given dictionary in place.

    You would typically use this when constructing a Pyramid Configurator
    object, like so::

        def main(global_config, **settings):
            config_file = global_config['__file__']
            load_into_settings(config_file, settings)
            config = Configurator(settings=settings)

    """
    filename = os.path.expandvars(os.path.expanduser(filename))
    filename = os.path.abspath(os.path.normpath(filename))
    config = Config(filename)

    # Konfig keywords are added to every section when present; we have to
    # filter them out, otherwise plugin.load_from_config and
    # plugin.load_from_settings are unable to create instances.
    konfig_keywords = ['extends', 'overrides']

    # Put values from the config file into the pyramid settings dict.
    for section in config.sections():
        setting_prefix = section.replace(":", ".")
        for name, value in config.get_map(section).items():
            if name not in konfig_keywords:
                settings[setting_prefix + "." + name] = value

    # Store a reference to the Config object itself for later retrieval.
    settings['config'] = config
    return config
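For illustration, a minimal sketch of the flattening behaviour the docstring describes. The file name and section contents below are hypothetical, and the int coercion of `port` assumes konfig's usual value conversion:

# example.ini (hypothetical):
#
#   [app:main]
#   use = egg:myapp
#
#   [server:main]
#   host = 0.0.0.0
#   port = 6543

settings = {}
load_into_settings('example.ini', settings)

# Section names have ":" replaced by ".", so the dict now holds:
#   settings['app.main.use']     == 'egg:myapp'
#   settings['server.main.host'] == '0.0.0.0'
#   settings['server.main.port'] == 6543   (assuming konfig's int coercion)
#   settings['config']           is the underlying konfig Config object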
def load_into_settings(filename, settings):
    """Load config file contents into a Pyramid settings dict.

    This is a helper function for initialising a Pyramid settings dict
    from a config file.  It flattens the config file sections into dotted
    settings names and updates the given dictionary in place.

    You would typically use this when constructing a Pyramid Configurator
    object, like so::

        def main(global_config, **settings):
            config_file = global_config['__file__']
            load_into_settings(config_file, settings)
            config = Configurator(settings=settings)

    """
    filename = os.path.expandvars(os.path.expanduser(filename))
    filename = os.path.abspath(os.path.normpath(filename))
    config = Config(filename)

    # Put values from the config file into the pyramid settings dict.
    for section in config.sections():
        setting_prefix = section.replace(":", ".")
        for name, value in config.get_map(section).items():
            settings[setting_prefix + "." + name] = value

    # Store a reference to the Config object itself for later retrieval.
    settings['config'] = config
    return config
def _is_authorized_app(client_id: str, grant_type: str) -> str:
    """
    Validate the service's client id and grant_type.

    See setting.yaml for the configuration.
    :param client_id: service id
    :param grant_type: client authorization grant type
    :return: client_secret: service secret key
    """
    from konfig import Config

    # Fetch the client secret for every known client id.
    c = Config(current_app.config['SECURITY_CONF_PATH'])
    try:
        clients_map = c.get_map('CLIENT_LIST')
    except Exception:
        raise ClientConfigNotFound(
            "Unable to read the server-side client configuration; please "
            "ask the development team to verify it. PATH : {}".format(
                current_app.config['SECURITY_CONF_PATH']))

    secret_key = clients_map.get(client_id)
    if not secret_key:
        raise NotSupportServiceError(
            "Unknown Service ID: [{}]. Please verify!".format(client_id))

    if not compare_digest(current_app.config['JWT_GRANT_TYPE'], grant_type):
        raise GrantTypeError(
            "Unknown `grant_type`: '{}'. Please obtain it from your app "
            "credentials.".format(grant_type))

    return secret_key
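A hypothetical sketch of the security config this helper reads from SECURITY_CONF_PATH; the section and key shapes follow the code above, while the concrete ids, secrets, and grant type are invented:

# security config (hypothetical contents):
#
#   [CLIENT_LIST]
#   billing-service = 9f2c1c7e8d0b4a5f
#   report-service  = 77ab3d9c2f014e6b
#
# With current_app.config['JWT_GRANT_TYPE'] set to the expected grant type
# (e.g. 'client_credentials', an assumed value), a caller would do:

secret_key = _is_authorized_app('billing-service', 'client_credentials')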
def test_location_interpolation(self):
    config = Config(self.file_one)
    # file_one is a StringIO, so it has no location.
    self.assertEqual(config.get('one', 'location'), '${HERE}')
    # file_two is a real file, so it has a location.
    file_two_loc = os.path.dirname(self.file_two)
    self.assertEqual(config.get('three', 'location'), file_two_loc)
def init_app(app):
    # Initialise logging.
    from vanaspyhelper.LoggerManager import init_global_logger

    # Either use the configured path or fall back to the parent of the
    # current directory (`os.path.dirname(basedir)`).
    current_dir_parent = os.path.dirname(basedir)
    if 'APP_LOG_DIR' in app.config:
        log_dir = app.config['APP_LOG_DIR']
    else:
        log_dir = current_dir_parent

    if 'APP_LOG_LEVEL' in app.config:
        log_level = app.config['APP_LOG_LEVEL']
    else:
        log_level = "error"

    from konfig import Config

    # Initialise AES.
    c = Config(app.config['SECURITY_CONF_PATH'])
    global aes
    aes = AESTool(key=c.get_map('AES').get('AES_SECRET_KEY'))

    # Initialise client_id and client_secret.
    app.config.update(c.get_map('CLIENT_DATA'))

    # Initialise the global logger object.
    init_global_logger(log_dir, level=log_level, log_prefix="VanasRSC")
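Inferred layout of the file at SECURITY_CONF_PATH, based on the two get_map() calls above; the section and key names come from the code, the values are invented:

# security config (hypothetical values):
#
#   [AES]
#   AES_SECRET_KEY = 0123456789abcdef
#
#   [CLIENT_DATA]
#   client_id = my-service
#   client_secret = my-secret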
def main():
    parser = argparse.ArgumentParser(description='Marteau Server')

    parser.add_argument('config', help='Config file', nargs='?')
    parser.add_argument('--version', action='store_true', default=False,
                        help='Displays Marteau version and exits.')
    parser.add_argument('--log-level', dest='loglevel', default='info',
                        choices=list(LOG_LEVELS.keys()) +
                                [key.upper() for key in LOG_LEVELS.keys()],
                        help="log level")
    parser.add_argument('--log-output', dest='logoutput', default='-',
                        help="log output")
    parser.add_argument('--host', help='Host', default='0.0.0.0')
    parser.add_argument('--port', help='Port', type=int, default=8080)

    args = parser.parse_args()

    if args.version:
        print(__version__)
        sys.exit(0)

    if args.config is None:
        parser.print_usage()
        sys.exit(0)

    # Configure the logger.
    configure_logger(logger, args.loglevel, args.logoutput)

    # Load the config file.
    config = Config(args.config)

    # Load the app and the queue.
    global_config = {}
    if config.has_section('marteau'):
        settings = config.get_map('marteau')
    else:
        settings = {}

    # Check whether Redis is running.
    if not redis_available():
        raise IOError('Marteau needs Redis to run.')

    # Load the fixtures plugins.
    for fixture in settings.get('fixtures', []):
        import_string(fixture)

    logger.info('Loaded plugins: %s' % ', '.join(get_fixtures()))

    app = webapp(global_config, **settings)

    try:
        httpd = SocketIOServer((args.host, args.port), app,
                               resource="socket.io", policy_server=False)
        logger.info('Hammer ready, at http://%s:%s. Where are the nails?'
                    % (args.host, args.port))
        httpd.serve_forever()
    except KeyboardInterrupt:
        sys.exit(0)
    finally:
        logger.info('Bye!')
def get_settings(self):
    """Convert config settings into a Python dict."""
    config = Config(path.join(getcwd(), 'src', 'config.ini')).as_args()
    # as_args() yields a flat ['--section-option', 'value', ...] list; pair
    # each option name (minus the leading '--') with its value.  Note this
    # pairing assumes every option carries a value (bare boolean flags
    # would break it).
    for i in range(len(config) // 2):
        self.settings[config[i * 2][2:]] = config[i * 2 + 1]
def read_config(cli_fname=None):
    """Return a config object (``dict``) read from the first found
    configuration file."""
    config_fnames = []

    # If given on the command line, append the file.
    if cli_fname:
        config_fnames.append(cli_fname)

    # If the env variable exists, append the file.
    env_fname = os.environ.get('FLEXFIELDDB_CONF')
    if env_fname:
        config_fnames.append(env_fname)

    # Append system config files (or the virtualenv config file if in a
    # virtualenv).
    venv_path = path_to_venv()
    if not venv_path:
        config_folders = [
            os.path.join(XDG_CONFIG_HOME, 'flexfielddb'),
            os.path.join('/', 'usr', 'local', 'etc', 'flexfielddb'),
            os.path.join('/', 'etc', 'flexfielddb'),
        ]
    else:
        config_folders = [os.path.join(venv_path, 'etc', 'flexfielddb')]
    config_fnames.extend([
        os.path.join(config_folder, 'flexfielddb.ini')
        for config_folder in config_folders
    ])

    for fname in config_fnames:
        if os.path.exists(fname):
            return Config(fname)
def openIni(path: str) -> Config:
    """
    Read an ini file.

    :param path: path to the ini file
    :return: the parsed Config object
    """
    return Config(path)
def test_convert_float(self):
    config = Config(self.file_args)
    self.assertEqual(config['floats']['stuff'], 10.3)
    self.assertEqual(config['floats']['float'], 9.0)
    self.assertEqual(config['floats']['again'], .3)
    self.assertEqual(config['floats']['digits'], 10.34)
    self.assertEqual(config['floats']['digits2'], .34)
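The [floats] section implied by these assertions; this is a reconstruction from the asserted values, not the actual fixture file:

# file_args fixture, reconstructed from the assertions above:
#
#   [floats]
#   stuff = 10.3
#   float = 9.0
#   again = .3
#   digits = 10.34
#   digits2 = .34
#
# konfig converts these strings to Python floats on access.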
def load(args=sys.argv):
    config = Config(args[1])
    data = args[2]
    counters = RedisCohortCounters(**dict(config['redis']))
    print('Loading %r into Redis' % data)
    with open(data) as f:
        counters.load(f)
    print('Done')
def initialize_app(config):
    # logging configuration
    logging.config.fileConfig(config, disable_existing_loggers=False)
    logger.info("Read configuration from %r" % config)

    app._config_file = config
    app._config = Config(config)
    app.add_hook('before_request', before_request)
    app.add_hook('after_request', after_request)

    # statsd configuration
    app._statsd = _Statsd(app._config['statsd'])

    # sentry configuration
    if app._config['sentry']['enabled']:
        app._sentry = Sentry(app._config['sentry']['dsn'])
    else:
        app._sentry = None

    # backend configuration
    configfile = app._config['absearch']['config']
    schemafile = app._config['absearch']['schema']

    if app._config['absearch']['backend'] == 'aws':
        logger.info("Read config and schema from AWS")
        config_reader = partial(get_s3_file, configfile, app._config,
                                app._statsd)
        schema_reader = partial(get_s3_file, schemafile, app._config,
                                app._statsd)
    else:
        # directory
        datadir = app._config['directory']['path']
        logger.info("Read config and schema from %r on disk" % datadir)

        def config_reader():
            with open(os.path.join(datadir, configfile)) as f:
                data = f.read()
                return json.loads(data), hashlib.md5(data).hexdigest()

        def schema_reader():
            with open(os.path.join(datadir, schemafile)) as f:
                data = f.read()
                return json.loads(data), hashlib.md5(data).hexdigest()

    # counter configuration
    counter = app._config['absearch']['counter']
    if counter == 'redis':
        counter_options = dict(app._config['redis'])
    else:
        counter_options = {}
    counter_options['statsd'] = app._statsd

    max_age = app._config['absearch']['max_age']
    app.settings = SearchSettings(config_reader, schema_reader, counter,
                                  counter_options, max_age)
def main(argv):
    parser = argparse.ArgumentParser(prog=argv[0],
                                     description="Location Importer")
    parser.add_argument("--dry-run", action="store_true")
    # TODO rely on ICHNAEA_CFG / ichnaea.config, as the worker is relying
    # on it anyways
    parser.add_argument("config", help="config file")
    parser.add_argument("source", help="source file")
    args = parser.parse_args(argv[1:])

    settings = Config(args.config).get_map("ichnaea")
    db = Database(settings["db_master"],
                  socket=settings.get("db_master_socket"),
                  create=False)
    session = db.session()
    added = load_file(session, args.source)
    print("Added %s records." % added)
    if args.dry_run:
        session.rollback()
    else:  # pragma: no cover
        session.commit()
    return added
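A sketch of the [ichnaea] section the importer expects; the key names come from the code above, while the DSN and socket path are invented:

# ichnaea.ini (hypothetical values):
#
#   [ichnaea]
#   db_master = mysql+pymysql://user:pwd@localhost/location
#   db_master_socket = /var/run/mysqld/mysqld.sock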
def main():
    configure_logger()

    if len(sys.argv) > 1:
        ini = sys.argv[1]
    else:
        ini = "kompost.ini"

    os.environ["CURDIR"] = os.getcwd()

    config = Config(codecs.open(ini, "r", "utf8"))
    config = dict(config.items("kompost"))

    target = config["target"]
    src = config["src"]
    socket.setdefaulttimeout(int(config.get("timeout", 10)))

    config["media"] = os.path.abspath(os.path.join(target, "media"))
    config["generic"] = os.path.join(src, "generic.mako")
    config["cats"] = os.path.join(src, "category.mako")
    config["icons"] = ("pen.png", "info.png", "thumbsup.png", "right.png",
                       "flash.png")
    config["metadata"] = os.path.join(target, "metadata.json")

    generate(config)
    pdf(config)
def test_redis():
    config = Config(test_config)
    counter = RedisCohortCounters(**dict(config['redis']))
    for i in range(10):
        counter.incr('en-US', 'US', 'abc')
    counter.decr('en-US', 'US', 'abc')
    value = counter.get('en-US', 'US', 'abc')
    assert value == 9, value
def test_as_args(self):
    config = Config(self.file_args)
    args = config.as_args(strip_prefixes=['circus'],
                          omit_sections=['bleh', 'mi', 'floats'],
                          omit_options=[('other', 'thing')])

    wanted = ['--other-stuff', '10.3', '--httpd', '--zmq-endpoint',
              'http://ok']
    wanted.sort()
    args.sort()
    self.assertEqual(args, wanted)

    args = config.as_args(omit_sections=['bleh', 'mi', 'floats'])

    wanted = ['--circus-zmq-endpoint', 'http://ok', '--other-thing',
              'bleh', '--other-stuff', '10.3', '--circus-httpd']
    wanted.sort()
    args.sort()
    self.assertEqual(args, wanted)

    # It also works with an argparse parser.
    parser = argparse.ArgumentParser(description='Run some watchers.')
    parser.add_argument('config', help='configuration file', nargs='?')
    parser.add_argument('-L', '--log-level', dest='loglevel')
    parser.add_argument('--log-output', dest='logoutput')
    parser.add_argument('--daemon', dest='daemonize', action='store_true')
    parser.add_argument('--pidfile', dest='pidfile')
    parser.add_argument('--multi', action='append')

    args = config.scan_args(parser, strip_prefixes=['mi'])
    args.sort()

    wanted = ['--log-level', u'DEBUG', '--log-output', u'stdout',
              '--daemon', '--pidfile', u'pid', '--multi', 'one',
              '--multi', 'two', '--multi', 'three']
    wanted.sort()
    self.assertEqual(wanted, args)
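A rough reconstruction of the self.file_args fixture behind this test, inferred from the expected argument lists; the exact option spellings and the multi-value syntax in the real fixture may differ:

# Reconstructed fixture (hedged):
#
#   [circus]
#   httpd = True
#   zmq_endpoint = http://ok
#
#   [other]
#   thing = bleh
#   stuff = 10.3
#
#   [mi]
#   log_level = DEBUG
#   log_output = stdout
#   daemon = True
#   pidfile = pid
#   multi = one two three    ; exact multi-value syntax is a guess
#
# as_args() turns "[section] option = value" into '--section-option value'
# pairs, and (as '--httpd' above shows) emits True booleans as bare flags.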
def main():
    configure_logger()

    if len(sys.argv) > 1:
        ini = sys.argv[1]
    else:
        ini = "kompost.ini"

    os.environ['CURDIR'] = os.getcwd()

    config = Config(codecs.open(ini, "r", "utf8"))
    config = dict(config.items('kompost'))

    target = config['target']
    src = config['src']
    socket.setdefaulttimeout(int(config.get('timeout', 10)))

    config['media'] = os.path.abspath(os.path.join(target, 'media'))
    config['generic'] = os.path.join(src, 'generic.mako')
    config['cats'] = os.path.join(src, 'category.mako')
    config['icons'] = ('pen.png', 'info.png', 'thumbsup.png', 'right.png',
                       'flash.png')
    config['metadata'] = os.path.join(target, 'metadata.json')

    generate(config)
    pdf(config)
def create_app():
    """ Create the app object and return it """
    app = Flask(__name__)

    # Load application settings.
    settings = os.environ.get("FLASK_SETTINGS", SETTINGS)
    if settings is not None:
        c = Config(settings)
        print(c)
        app.config.update(c.get_map('flask'))

    from users.views import user

    # Register the blueprints to the app.
    app.register_blueprint(user)

    db.init_app(app)

    return app
def _get_configuration(self):
    # Loads strips the extra information contained in the ini files,
    # so we need to parse them again.
    config_file = self.config['config']

    # When copying the configuration files, we lose the config/ prefix, so
    # try to read from this folder in case the file doesn't exist.
    if not os.path.isfile(config_file):
        config_file = os.path.basename(config_file)
        if not os.path.isfile(config_file):
            msg = 'Unable to locate the configuration file, aborting.'
            raise LookupError(msg)

    return Config(config_file).get_map('loads')
def create():
    settings = os.path.join(os.path.dirname(__file__), 'settings.ini')
    settings = os.environ.get('FLASK_SETTINGS', settings)

    app = Flask(__name__)
    app.config_file = Config(settings)
    app.config.update(app.config_file.get_map('flask'))

    for blueprint in blueprints:
        app.register_blueprint(blueprint['pkg'],
                               url_prefix=blueprint['prefix'])

    return app
def main(args=None):
    # 1. read the command-line options
    args = _read_args(args)
    if args.version:
        print(__version__)
        sys.exit(0)

    # 2. grab the config
    from konfig import Config
    config = Config(args.config).get_map('udun')

    # 3. grab things in the redis list
    events = _get_redis_events(config.get('redis_host', 'localhost'),
                               config.get('redis_port', 6379),
                               config.get('redis_listname', 'udun'))

    # 4. combine per-collection
    collection_ids = _get_impacted_collections(events)

    # 5. send it to Balrog
    _poke_balrog(collection_ids)
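The [udun] section this reads, reconstructed from the .get() defaults above; the values shown equal the fallbacks used when the file omits them:

# config file (keys from the code above):
#
#   [udun]
#   redis_host = localhost
#   redis_port = 6379
#   redis_listname = udun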
def main():
    if check() != 0:
        raise ValueError("Invalid File")

    config = Config(conf)
    config_file = config['absearch']['config']
    schema_file = config['absearch']['schema']

    for file_ in (config_file, schema_file):
        filename = os.path.join(datadir, file_)
        print('Uploading %r' % filename)
        set_s3_file(filename, config)

    print('Done')
def populate_S3():
    # Populate the bucket in Moto.
    config = Config(test_config)
    conn = _get_connector(config)
    conn.create_bucket(config['aws']['bucketname'])

    datadir = os.path.join(os.path.dirname(__file__), '..', '..', 'data')

    for file_ in (config['absearch']['config'],
                  config['absearch']['schema']):
        filename = os.path.join(datadir, file_)
        set_s3_file(filename, config)

    _redis = redis.StrictRedis(**dict(config['redis']))
    _redis.flushdb()
def initialize_app(config):
    app._config_file = config
    app._config = Config(config)

    # logging configuration
    logging.config.fileConfig(config)

    # statsd configuration
    app._statsd = _Statsd(app._config['statsd'])

    # sentry configuration
    if app._config['sentry']['enabled']:
        app._sentry = Sentry(app._config['sentry']['dsn'])
    else:
        app._sentry = None

    # backend configuration
    configfile = app._config['absearch']['config']
    schemafile = app._config['absearch']['schema']

    if app._config['absearch']['backend'] == 'aws':
        config_reader = partial(get_s3_file, configfile, app._config,
                                app._statsd)
        schema_reader = partial(get_s3_file, schemafile, app._config,
                                app._statsd)
    else:
        # directory
        datadir = app._config['directory']['path']

        def config_reader():
            with open(os.path.join(datadir, configfile)) as f:
                data = f.read()
                return json.loads(data), hashlib.md5(data).hexdigest()

        def schema_reader():
            with open(os.path.join(datadir, schemafile)) as f:
                data = f.read()
                return json.loads(data), hashlib.md5(data).hexdigest()

    # counter configuration
    counter = app._config['absearch']['counter']
    if counter == 'redis':
        counter_options = dict(app._config['redis'])
    else:
        counter_options = {}
    counter_options['statsd'] = app._statsd

    max_age = app._config['absearch']['max_age']
    app.settings = SearchSettings(config_reader, schema_reader, counter,
                                  counter_options, max_age)
def create_app(name=__name__, blueprints=None, settings=None):
    app = Flask(name)

    # load configuration
    settings = os.environ.get('FLASK_SETTINGS', settings)
    if settings is not None:
        app.config_file = Config(settings)
        app.config.update(app.config_file.get_map('flask'))

    # register blueprints
    if blueprints is not None:
        for bp in blueprints:
            app.register_blueprint(bp)

    return app
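Typical usage of this factory, as a sketch; the file name and [flask] keys are hypothetical, but get_map('flask') feeding app.config.update() is exactly the pattern above:

# settings.ini (hypothetical):
#
#   [flask]
#   DEBUG = True
#   SECRET_KEY = change-me

app = create_app(settings='settings.ini')
# Assuming konfig's usual bool coercion, app.config['DEBUG'] is True.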
def load_conf(config_file=None):
    # Default config.
    options = ['db', 'wsserver', 'wsscheme', 'wsport', 'broker', 'debug',
               'host', 'port', 'no_auth']
    config = {'db': 'python',
              'dboptions': {},
              'wsscheme': 'ws',
              'wsserver': 'localhost',
              'wsport': 8080,
              'broker': 'ipc:///tmp/loads-front.ipc',
              'debug': True,
              'host': '0.0.0.0',
              'port': 8080,
              'no_auth': False}

    if config_file is not None:
        config_parser = Config(config_file)
        for key, value in config_parser.items('loads'):
            if key not in options:
                continue
            config[key] = value

    return config
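A short usage sketch (hypothetical file name and contents) showing how the whitelist and the defaults interact:

# loads.ini (hypothetical):
#
#   [loads]
#   wsport = 9090
#   unknown_key = ignored    ; dropped -- not in the `options` whitelist

conf = load_conf('loads.ini')
assert conf['wsport'] == 9090       # overridden (assuming konfig int coercion)
assert conf['db'] == 'python'       # untouched default
assert 'unknown_key' not in conf    # filtered out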
def test_redis_dump_load():
    config = Config(test_config)
    counter = RedisCohortCounters(**dict(config['redis']))
    counter._redis.flushdb()

    for i in range(10):
        counter.incr('en-US', 'US', 'abc')

    dumped = list(counter.dump())

    counter = RedisCohortCounters(**dict(config['redis']))
    counter.load(dumped)
    value = counter.get('en-US', 'US', 'abc')
    assert value == 10, value
def initialize_app(config):
    # logging configuration
    logging.config.fileConfig(config, disable_existing_loggers=False)
    logger.info("Read configuration from %r" % config)

    app._config_file = config
    app._config = Config(config)
    app.add_hook('before_request', before_request)
    app.add_hook('after_request', after_request)

    # sentry configuration
    if app._config['sentry']['enabled']:
        app._sentry = Sentry(app._config['sentry']['dsn'])
    else:
        app._sentry = None

    # backend configuration
    configfile = app._config['absearch']['config']
    schemafile = app._config['absearch']['schema']

    # directory
    datadir = app._config['directory']['path']
    logger.info("Read config and schema from %r on disk" % datadir)

    def config_reader():
        with open(os.path.join(datadir, configfile)) as f:
            data = f.read()
            return (
                json.loads(data),
                hashlib.md5(data.encode("utf8")).hexdigest(),
            )

    def schema_reader():
        with open(os.path.join(datadir, schemafile)) as f:
            data = f.read()
            return (
                json.loads(data),
                hashlib.md5(data.encode("utf8")).hexdigest(),
            )

    # counter configuration
    counter = app._config['absearch']['counter']
    counter_options = {}

    max_age = app._config['absearch']['max_age']
    app.settings = SearchSettings(config_reader, schema_reader, counter,
                                  counter_options, max_age)
def get_srv_settings(self):
    """Convert config settings into a Python dict."""
    config = Config(path.join(getcwd(), 'src',
                              'srv_settings.ini')).as_args()
    # as_args() yields ['--section-option', 'value', ...]; pair each
    # option name (minus the leading '--') with its value.
    for i in range(len(config) // 2):
        self.srv_settings[config[i * 2][2:]] = config[i * 2 + 1]

    for st_name in self.srv_settings:
        try:
            self.tptp_rc_dict.update({
                '-'.join(st_name.split('-')[2:]):
                    str(int(self.srv_settings[st_name])).zfill(3)
            })
        except ValueError:
            raise Exception(f'Wrong value in settings, line - {st_name}')
def test_get_set_s3_file():
    class Stats(object):
        @contextmanager
        def timer(self, name):
            yield
        timed = timer

    stats = Stats()
    config = Config(test_config)
    datadir = os.path.join(os.path.dirname(__file__), '..', '..', 'data')
    datafile = os.path.join(datadir, config['absearch']['config'])

    with open(datafile) as f:
        old_data = f.read()
        old_hash = hashlib.md5(old_data).hexdigest()

    # Reading the S3 bucket (that was filled with datafile).
    res, hash = get_s3_file(datafile, config, statsd=stats)
    assert res['defaultInterval'] == 31536000
    assert hash == old_hash

    # Changing the file content.
    res['defaultInterval'] = -1
    with open(datafile, 'w') as f:
        f.write(json.dumps(res))

    try:
        # Setting the file in the bucket with the new content.
        set_s3_file(datafile, config, statsd=stats)

        # Getting back the new content.
        res, hash = get_s3_file(config['absearch']['config'], config,
                                use_cache=False, statsd=stats)

        # We should see the change.
        assert res['defaultInterval'] == -1
    finally:
        # Restore the old content.
        with open(datafile, 'w') as f:
            f.write(old_data)
def create_app(name=__name__, blueprints=None, settings=None,
               template_folder=None, static_folder=None):
    app = Flask(name,
                template_folder=template_folder,
                static_folder=static_folder)

    # load configuration
    settings = os.environ.get('FLASK_SETTINGS', settings)
    if settings is not None:
        app.config_file = Config(settings)
        app.config.update(app.config_file.get_map('flask'))

    app.register_error_handler(404, page_not_found)
    app.register_error_handler(405, method_not_allowed)

    # register blueprints
    if blueprints is not None:
        for bp in blueprints:
            app.register_blueprint(bp)
            bp.app = app

    app.register_blueprint(doc)

    return app
def main(sysargs=None):
    if sysargs is None:
        sysargs = sys.argv[1:]

    parser = argparse.ArgumentParser(description='Runs a load test.')
    parser.add_argument('fqn', help='Fully qualified name of the test',
                        nargs='?')
    parser.add_argument('--config', help='Configuration file to read',
                        type=str, default=None)
    parser.add_argument('-u', '--users', help='Number of virtual users',
                        type=str, default='1')

    # loads works with cycles or duration
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-c', '--cycles', help='Number of cycles per users',
                       type=str, default=None)
    group.add_argument('-d', '--duration', help='Duration of the test (s)',
                       type=int, default=None)

    parser.add_argument('--version', action='store_true', default=False,
                        help='Displays Loads version and exits.')
    parser.add_argument('-a', '--agents', help='Number of agents to use',
                        type=int)
    parser.add_argument('-b', '--broker', help='Broker endpoint',
                        default=DEFAULT_FRONTEND)
    parser.add_argument('--test-runner', default=None,
                        help='The path to binary to use as the test runner '
                             'when in distributed mode. The default is '
                             'this runner')
    parser.add_argument('--server-url', default=None,
                        help='The URL of the server you want to test. It '
                             'will override any value you provided in '
                             'the tests for the WebTest client.')
    parser.add_argument('--zmq-receiver', default=DEFAULT_RECEIVER,
                        help='Socket where the agents send the results to.')
    parser.add_argument('--zmq-publisher', default=DEFAULT_PUBLISHER,
                        help='Socket where the results are published.')

    outputs = [st.name for st in output_list()]
    outputs.sort()

    parser.add_argument('--quiet', action='store_true', default=False)
    parser.add_argument('--output', action='append', default=['stdout'],
                        help='The output used to display the results',
                        choices=outputs)
    parser.add_argument('--aws-image-id', help='Amazon Server Id', type=str,
                        default='ami-be77e08e')
    parser.add_argument('--aws-access-key', help='Amazon Access Key',
                        type=str, default=os.environ.get('ACCESS_KEY'))
    parser.add_argument('--aws-secret-key', help='Amazon Secret Key',
                        type=str, default=os.environ.get('SECRET_KEY'))
    parser.add_argument('--aws-ssh-user', help='Amazon User', type=str,
                        default='ubuntu')
    parser.add_argument('--aws-ssh-key', help='Amazon SSH Key file',
                        type=str, default='ubuntu')
    parser.add_argument('--aws', help='Running on AWS?',
                        action='store_true', default=False)
    parser.add_argument('--aws-python-deps', help='Python deps to install',
                        action='append', default=[])
    parser.add_argument('--aws-system-deps', help='System deps to install',
                        action='append', default=[])
    parser.add_argument('--aws-test-dir', help='Test dir to embark',
                        default=None)

    # per-output options
    for output in output_list():
        for option, value in output.options.items():
            help, type_, default, cli = value
            if not cli:
                continue

            kw = {'help': help, 'type': type_}
            if default is not None:
                kw['default'] = default

            parser.add_argument('--output-%s-%s' % (output.name, option),
                                **kw)

    args = parser.parse_args(sysargs)

    if args.config is not None:
        # second pass!
        config = Config(args.config)
        config_args = config.scan_args(parser, strip_prefixes=['loads'])
        args = parser.parse_args(args=sysargs + config_args)

    if args.quiet and 'stdout' in args.output:
        args.output.remove('stdout')

    # loggers setting
    wslogger = logging.getLogger('ws4py')
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    wslogger.addHandler(ch)
    set_logger()

    if args.version:
        print(__version__)
        sys.exit(0)

    if args.fqn is None:
        parser.print_usage()
        sys.exit(0)

    # deploy on amazon
    if args.aws:
        try_import('paramiko', 'boto')
        from loads.deploy import aws_deploy
        master, master_id = aws_deploy(args.aws_access_key,
                                       args.aws_secret_key,
                                       args.aws_ssh_user,
                                       args.aws_ssh_key,
                                       args.aws_image_id,
                                       args.aws_python_deps,
                                       args.aws_system_deps,
                                       args.aws_test_dir)
        # XXX
        args.broker = 'tcp://%s:5553' % master['host']
        args.zmq_publisher = 'tcp://%s:5554' % master['host']
    else:
        master_id = None

    try:
        args = dict(args._get_kwargs())
        res = run(args)
        return res
    finally:
        if master_id is not None:
            print('Shutting down Amazon boxes')
            from loads.deploy import aws_shutdown
            aws_shutdown(args['aws_access_key'],
                         args['aws_secret_key'],
                         master_id)
def _parse(sysargs=None):
    if sysargs is None:
        sysargs = sys.argv[1:]

    parser = argparse.ArgumentParser(description="Runs a load test.")
    parser.add_argument("fqn", help="Fully Qualified Name of the test",
                        nargs="?")
    parser.add_argument("--config", help="Configuration file to read",
                        type=str, default=None)
    parser.add_argument("-u", "--users", help="Number of virtual users",
                        type=str, default="1")
    parser.add_argument("--test-dir", help="Directory to run the test from",
                        type=str, default=None)
    parser.add_argument("--python-dep", help="Python (PyPI) dependencies "
                                             "to install",
                        action="append", default=[])
    parser.add_argument("--include-file",
                        help="File(s) to include (needed for the test) "
                             "- glob-style",
                        action="append", default=[])
    parser.add_argument("--ssh", help="SSH tunnel - e.g. user@server:port",
                        type=str, default=None)

    # loads works with hits or duration
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--hits", help="Number of hits per user",
                       type=str, default=None)
    group.add_argument("-d", "--duration", help="Duration of the test (s)",
                       type=int, default=None)

    parser.add_argument("--version", action="store_true", default=False,
                        help="Displays Loads version and exits.")
    parser.add_argument("--test-runner", default=None,
                        help="The path to binary to use as the test runner "
                             "when in distributed mode. The default is "
                             "this (python) runner")
    parser.add_argument("--server-url", default=None,
                        help="The URL of the server you want to test. It "
                             "will override any value you provided in "
                             "the tests for the WebTest client.")
    parser.add_argument("--observer", action="append",
                        choices=[observer.name for observer in observers],
                        help="Callable that will receive the final results. "
                             "Only in distributed mode (runs on the broker)")

    #
    # Loading observers options
    #
    for observer in observers:
        prefix = "--observer-%s-" % observer.name
        for option in observer.options:
            name = prefix + option["name"]
            parser.add_argument(name, help=option.get("help"),
                                default=option.get("default"),
                                type=option.get("type"),
                                action=option.get("action"))

    parser.add_argument("--no-patching",
                        help="Deactivate Gevent monkey patching.",
                        action="store_true", default=False)
    parser.add_argument("--project-name", help="Project name.",
                        default="N/A")

    #
    # distributed options
    #
    parser.add_argument("-a", "--agents", help="Number of agents to use.",
                        type=int)
    parser.add_argument("--zmq-receiver", default=None,
                        help=("ZMQ socket where the runners send the events"
                              " to (opened on the agent side)."))
    parser.add_argument("--zmq-publisher", default=DEFAULT_PUBLISHER,
                        help="ZMQ socket where the test results messages "
                             "are published.")
    parser.add_argument("--ping-broker", action="store_true", default=False,
                        help="Pings the broker to get info, display it and "
                             "exits.")
    parser.add_argument("--check-cluster", action="store_true",
                        default=False,
                        help="Runs a test on all agents then exits.")
    parser.add_argument("--purge-broker", action="store_true",
                        default=False,
                        help="Stops all runs on the broker and exits.")
    parser.add_argument("-b", "--broker", help="Broker endpoint",
                        default=DEFAULT_FRONTEND)
    parser.add_argument("--user-id",
                        help="Name of the user who runs the test",
                        type=str, default="undefined")

    outputs = [st.name for st in output_list()]
    outputs.sort()

    parser.add_argument("--batched", action="store_true", default=False,
                        help="Batch results in distributed mode")
    parser.add_argument("--quiet", action="store_true", default=False,
                        help="Do not print any log messages.")
    parser.add_argument("--output", action="append", default=["stdout"],
                        help="The output which will get the results",
                        choices=outputs)
    parser.add_argument("--attach", help="Reattach to a distributed run",
                        action="store_true", default=False)
    parser.add_argument("--detach", help="Detach immediately the current "
                                         "distributed run",
                        action="store_true", default=False)

    # Adds the per-output and per-runner options.
    add_options(RUNNERS, parser, fmt="--{name}-{option}")
    add_options(output_list(), parser, fmt="--output-{name}-{option}")

    args = parser.parse_args(sysargs)

    if args.config is not None:
        # second pass!
        config = Config(args.config)
        config_args = config.scan_args(parser, strip_prefixes=["loads"])
        if "fqn" in config["loads"]:
            config_args += [config["loads"]["fqn"]]
        args = parser.parse_args(args=sysargs + config_args)

    if args.quiet and "stdout" in args.output:
        args.output.remove("stdout")

    return args, parser
from flask import (Flask, render_template, request, url_for, session,
                   redirect, flash)
from flask_bcrypt import Bcrypt
import os
from flask_pymongo import PyMongo
import requests
import urllib.parse

# Crawling helpers.
import opgg_crawling

# Used to throttle the fetch rate.
from time import sleep

from konfig import Config

cc = Config("./conf.ini")
api_conf = cc.get_map("api")
app_conf = cc.get_map("app")
db_conf = cc.get_map("db")

app = Flask(__name__)

# The DB URI and the secret key are loaded from the config file.
app.config['SECRET_KEY'] = app_conf['SECRET_KEY']
app.config['MONGO_URI'] = db_conf['MONGO_URI']
apikey = api_conf['LOL_API_KEY']

mongo = PyMongo(app)
bcrypt = Bcrypt(app)


@app.route('/')
def index(name=None):
    tip_List = mongo.db.tip_List
    get_tips = tip_List.find().sort([['_id', -1]]).limit(10)
    tip_lists = []
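The conf.ini layout implied by the three get_map() calls above; the key names come from the code, the values are invented:

# conf.ini (hypothetical values):
#
#   [api]
#   LOL_API_KEY = RGAPI-xxxxxxxx
#
#   [app]
#   SECRET_KEY = change-me
#
#   [db]
#   MONGO_URI = mongodb://localhost:27017/lolapp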
def get_config(filename=None):
    filename = filename or os.environ.get('CONFIG', 'development.ini')
    return Config(filename)
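Usage sketch: the CONFIG environment variable picks the file, with development.ini as the fallback; production.ini and test.ini below are hypothetical file names:

os.environ['CONFIG'] = 'production.ini'
config = get_config()              # reads production.ini
config = get_config('test.ini')    # an explicit argument wins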
def main(sysargs=None):
    if sysargs is None:
        sysargs = sys.argv[1:]

    parser = argparse.ArgumentParser(description='Runs a load test.')
    parser.add_argument('fqn', help='Fully Qualified Name of the test',
                        nargs='?')
    parser.add_argument('--config', help='Configuration file to read',
                        type=str, default=None)
    parser.add_argument('-u', '--users', help='Number of virtual users',
                        type=str, default='1')
    parser.add_argument('--test-dir', help='Directory to run the test from',
                        type=str, default=None)
    parser.add_argument('--python-dep', help='Python (PyPI) dependencies '
                                             'to install',
                        action='append', default=[])
    parser.add_argument('--include-file',
                        help='File(s) to include (needed for the test) '
                             '- glob-style',
                        action='append', default=[])

    # loads works with hits or duration
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--hits', help='Number of hits per user',
                       type=str, default=None)
    group.add_argument('-d', '--duration', help='Duration of the test (s)',
                       type=int, default=None)

    parser.add_argument('--version', action='store_true', default=False,
                        help='Displays Loads version and exits.')
    parser.add_argument('--test-runner', default=None,
                        help='The path to binary to use as the test runner '
                             'when in distributed mode. The default is '
                             'this (python) runner')
    parser.add_argument('--server-url', default=None,
                        help='The URL of the server you want to test. It '
                             'will override any value you provided in '
                             'the tests for the WebTest client.')
    parser.add_argument('--observer', action='append',
                        help='Callable that will receive the final results. '
                             'Only in distributed mode (runs on the broker)')
    parser.add_argument('--no-patching',
                        help='Deactivate Gevent monkey patching.',
                        action='store_true', default=False)

    #
    # distributed options
    #
    parser.add_argument('-a', '--agents', help='Number of agents to use.',
                        type=int)
    parser.add_argument('--zmq-receiver', default=None,
                        help=('ZMQ socket where the runners send the events'
                              ' to (opened on the agent side).'))
    parser.add_argument('--zmq-publisher', default=DEFAULT_PUBLISHER,
                        help='ZMQ socket where the test results messages '
                             'are published.')
    parser.add_argument('--ping-broker', action='store_true', default=False,
                        help='Pings the broker to get info, display it and '
                             'exits.')
    parser.add_argument('--check-cluster', action='store_true',
                        default=False,
                        help='Runs a test on all agents then exits.')
    parser.add_argument('--purge-broker', action='store_true',
                        default=False,
                        help='Stops all runs on the broker and exits.')
    parser.add_argument('-b', '--broker', help='Broker endpoint',
                        default=DEFAULT_FRONTEND)

    outputs = [st.name for st in output_list()]
    outputs.sort()

    parser.add_argument('--quiet', action='store_true', default=False,
                        help='Do not print any log messages.')
    parser.add_argument('--output', action='append', default=['stdout'],
                        help='The output which will get the results',
                        choices=outputs)
    parser.add_argument('--attach', help='Reattach to a distributed run',
                        action='store_true', default=False)
    parser.add_argument('--detach', help='Detach immediately the current '
                                         'distributed run',
                        action='store_true', default=False)

    # Adds the per-output and per-runner options.
    add_options(RUNNERS, parser, fmt='--{name}-{option}')
    add_options(output_list(), parser, fmt='--output-{name}-{option}')

    args = parser.parse_args(sysargs)

    if args.config is not None:
        # second pass!
        config = Config(args.config)
        config_args = config.scan_args(parser, strip_prefixes=['loads'])
        if 'fqn' in config['loads']:
            config_args += [config['loads']['fqn']]
        args = parser.parse_args(args=sysargs + config_args)

    if args.quiet and 'stdout' in args.output:
        args.output.remove('stdout')

    # loggers setting
    wslogger = logging.getLogger('ws4py')
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    wslogger.addHandler(ch)
    set_logger()

    if args.version:
        print(__version__)
        sys.exit(0)

    if args.ping_broker:
        client = Client(args.broker)
        res = client.ping()
        print('Broker running on pid %d' % res['pid'])
        print('%d agents registered' % len(res['agents']))
        print('endpoints:')
        for name, location in res['endpoints'].items():
            print(' - %s: %s' % (name, location))

        runs = client.list_runs()
        if len(runs) == 0:
            print('Nothing is running right now.')
        else:
            print('We have %d run(s) right now:' % len(runs))
            for run_id, agents in runs.items():
                print(' - %s with %d agent(s)' % (run_id, len(agents)))
        sys.exit(0)

    if args.purge_broker:
        client = Client(args.broker)
        runs = client.list_runs()
        if len(runs) == 0:
            print('Nothing to purge.')
        else:
            print('We have %d run(s) right now:' % len(runs))
            for run_id, workers in runs.items():
                print('Purging %s...' % run_id)
                client.stop_run(run_id)
            print('Purged.')
        sys.exit(0)

    if args.check_cluster:
        args.fqn = 'loads.examples.test_blog.TestWebSite.test_health'
        client = Client(args.broker)
        res = client.ping()
        args.agents = len(res['agents'])
        args.hits = '1'
        print('Running a health check on all %d agents' % args.agents)

    if args.fqn is None and not args.attach:
        parser.print_usage()
        sys.exit(0)

    args = dict(args._get_kwargs())
    res = run(args)
    return res
def _parse(sysargs=None):
    if sysargs is None:
        sysargs = sys.argv[1:]

    parser = argparse.ArgumentParser(description='Runs a load test.')
    parser.add_argument('fqn', help='Fully Qualified Name of the test',
                        nargs='?')
    parser.add_argument('--config', help='Configuration file to read',
                        type=str, default=None)
    parser.add_argument('-u', '--users', help='Number of virtual users',
                        type=str, default='1')
    parser.add_argument('--test-dir', help='Directory to run the test from',
                        type=str, default=None)
    parser.add_argument('--python-dep', help='Python (PyPI) dependencies '
                                             'to install',
                        action='append', default=[])
    parser.add_argument('--include-file',
                        help='File(s) to include (needed for the test) '
                             '- glob-style',
                        action='append', default=[])
    parser.add_argument('--ssh', help='SSH tunnel - e.g. user@server:port',
                        type=str, default=None)

    # loads works with hits or duration
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--hits', help='Number of hits per user',
                       type=str, default=None)
    group.add_argument('-d', '--duration', help='Duration of the test (s)',
                       type=int, default=None)

    parser.add_argument('--version', action='store_true', default=False,
                        help='Displays Loads version and exits.')
    parser.add_argument('--test-runner', default=None,
                        help='The path to binary to use as the test runner '
                             'when in distributed mode. The default is '
                             'this (python) runner')
    parser.add_argument('--server-url', default=None,
                        help='The URL of the server you want to test. It '
                             'will override any value you provided in '
                             'the tests for the WebTest client.')
    parser.add_argument('--observer', action='append',
                        choices=[observer.name for observer in observers],
                        help='Callable that will receive the final results. '
                             'Only in distributed mode (runs on the broker)')

    #
    # Loading observers options
    #
    for observer in observers:
        prefix = '--observer-%s-' % observer.name
        for option in observer.options:
            name = prefix + option['name']
            parser.add_argument(name, help=option.get('help'),
                                default=option.get('default'),
                                type=option.get('type'),
                                action=option.get('action'))

    parser.add_argument('--no-patching',
                        help='Deactivate Gevent monkey patching.',
                        action='store_true', default=False)
    parser.add_argument('--project-name', help='Project name.',
                        default='N/A')

    #
    # distributed options
    #
    parser.add_argument('-a', '--agents', help='Number of agents to use.',
                        type=int)
    parser.add_argument('--zmq-receiver', default=None,
                        help=('ZMQ socket where the runners send the events'
                              ' to (opened on the agent side).'))
    parser.add_argument('--zmq-publisher', default=DEFAULT_PUBLISHER,
                        help='ZMQ socket where the test results messages '
                             'are published.')
    parser.add_argument('--ping-broker', action='store_true', default=False,
                        help='Pings the broker to get info, display it and '
                             'exits.')
    parser.add_argument('--check-cluster', action='store_true',
                        default=False,
                        help='Runs a test on all agents then exits.')
    parser.add_argument('--purge-broker', action='store_true',
                        default=False,
                        help='Stops all runs on the broker and exits.')
    parser.add_argument('-b', '--broker', help='Broker endpoint',
                        default=DEFAULT_FRONTEND)
    parser.add_argument('--user-id',
                        help='Name of the user who runs the test',
                        type=str, default='undefined')

    outputs = [st.name for st in output_list()]
    outputs.sort()

    parser.add_argument('--batched', action='store_true', default=False,
                        help='Batch results in distributed mode')
    parser.add_argument('--quiet', action='store_true', default=False,
                        help='Do not print any log messages.')
    parser.add_argument('--output', action='append', default=['stdout'],
                        help='The output which will get the results',
                        choices=outputs)
    parser.add_argument('--attach', help='Reattach to a distributed run',
                        action='store_true', default=False)
    parser.add_argument('--detach', help='Detach immediately the current '
                                         'distributed run',
                        action='store_true', default=False)

    # Adds the per-output and per-runner options.
    add_options(RUNNERS, parser, fmt='--{name}-{option}')
    add_options(output_list(), parser, fmt='--output-{name}-{option}')

    args = parser.parse_args(sysargs)

    if args.config is not None:
        # second pass!
        config = Config(args.config)
        config_args = config.scan_args(parser, strip_prefixes=['loads'])
        if 'fqn' in config['loads']:
            config_args += [config['loads']['fqn']]
        args = parser.parse_args(args=sysargs + config_args)

    if args.quiet and 'stdout' in args.output:
        args.output.remove('stdout')

    return args, parser
def _parse(sysargs=None):
    if sysargs is None:
        sysargs = sys.argv[1:]

    parser = argparse.ArgumentParser(description='Runs a load test.')
    parser.add_argument('fqn', help='Fully Qualified Name of the test',
                        nargs='?')
    parser.add_argument('--config', help='Configuration file to read',
                        type=str, default=None)
    parser.add_argument('-u', '--users', help='Number of virtual users',
                        type=str, default='1')
    parser.add_argument('--test-dir', help='Directory to run the test from',
                        type=str, default=None)
    parser.add_argument('--python-dep', help='Python (PyPI) dependencies '
                                             'to install',
                        action='append', default=[])
    parser.add_argument('--include-file',
                        help='File(s) to include (needed for the test) '
                             '- glob-style',
                        action='append', default=[])

    # loads works with hits or duration
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--hits', help='Number of hits per user',
                       type=str, default=None)
    group.add_argument('-d', '--duration', help='Duration of the test (s)',
                       type=int, default=None)

    parser.add_argument('--version', action='store_true', default=False,
                        help='Displays Loads version and exits.')
    parser.add_argument('--test-runner', default=None,
                        help='The path to binary to use as the test runner '
                             'when in distributed mode. The default is '
                             'this (python) runner')
    parser.add_argument('--server-url', default=None,
                        help='The URL of the server you want to test. It '
                             'will override any value you provided in '
                             'the tests for the WebTest client.')
    parser.add_argument('--observer', action='append',
                        help='Callable that will receive the final results. '
                             'Only in distributed mode (runs on the broker)')
    parser.add_argument('--no-patching',
                        help='Deactivate Gevent monkey patching.',
                        action='store_true', default=False)
    parser.add_argument('--project-name', help='Project name.',
                        default='N/A')

    #
    # distributed options
    #
    parser.add_argument('-a', '--agents', help='Number of agents to use.',
                        type=int)
    parser.add_argument('--zmq-receiver', default=None,
                        help=('ZMQ socket where the runners send the events'
                              ' to (opened on the agent side).'))
    parser.add_argument('--zmq-publisher', default=DEFAULT_PUBLISHER,
                        help='ZMQ socket where the test results messages '
                             'are published.')
    parser.add_argument('--ping-broker', action='store_true', default=False,
                        help='Pings the broker to get info, display it and '
                             'exits.')
    parser.add_argument('--check-cluster', action='store_true',
                        default=False,
                        help='Runs a test on all agents then exits.')
    parser.add_argument('--purge-broker', action='store_true',
                        default=False,
                        help='Stops all runs on the broker and exits.')
    parser.add_argument('-b', '--broker', help='Broker endpoint',
                        default=DEFAULT_FRONTEND)

    outputs = [st.name for st in output_list()]
    outputs.sort()

    parser.add_argument('--quiet', action='store_true', default=False,
                        help='Do not print any log messages.')
    parser.add_argument('--output', action='append', default=['stdout'],
                        help='The output which will get the results',
                        choices=outputs)
    parser.add_argument('--attach', help='Reattach to a distributed run',
                        action='store_true', default=False)
    parser.add_argument('--detach', help='Detach immediately the current '
                                         'distributed run',
                        action='store_true', default=False)

    # Adds the per-output and per-runner options.
    add_options(RUNNERS, parser, fmt='--{name}-{option}')
    add_options(output_list(), parser, fmt='--output-{name}-{option}')

    args = parser.parse_args(sysargs)

    if args.config is not None:
        # second pass!
        config = Config(args.config)
        config_args = config.scan_args(parser, strip_prefixes=['loads'])
        if 'fqn' in config['loads']:
            config_args += [config['loads']['fqn']]
        args = parser.parse_args(args=sysargs + config_args)

    if args.quiet and 'stdout' in args.output:
        args.output.remove('stdout')

    return args, parser
def config():
    ini = os.environ.get('ICHNAEA_CFG', 'ichnaea.ini')
    return Config(ini)