Esempio n. 1
0
def read_config(cli_fname=None):
    """Return a ``Config`` object read from the first configuration file found.

    Candidates are tried in priority order: the explicit command-line
    path, the ``FLEXFIELDDB_CONF`` environment variable, then the
    virtualenv (or system) configuration folders.  Returns ``None``
    implicitly when no candidate file exists.
    """
    # Highest-priority candidates: CLI argument, then environment variable.
    candidates = [cli_fname, os.environ.get('FLEXFIELDDB_CONF')]
    candidates = [fname for fname in candidates if fname]
    # System config folders, or the virtualenv folder when inside a venv.
    venv_path = path_to_venv()
    if venv_path:
        folders = [os.path.join(venv_path, 'etc', 'flexfielddb')]
    else:
        folders = [
            os.path.join(XDG_CONFIG_HOME, 'flexfielddb'),
            os.path.join('/', 'usr', 'local', 'etc', 'flexfielddb'),
            os.path.join('/', 'etc', 'flexfielddb'),
        ]
    candidates += [
        os.path.join(folder, 'flexfielddb.ini') for folder in folders
    ]
    for candidate in candidates:
        if os.path.exists(candidate):
            return Config(candidate)
Esempio n. 2
0
 def test_location_interpolation(self):
     """``${HERE}`` interpolates to the config file's folder, if it has one."""
     config = Config(self.file_one)
     # file_one is a StringIO, so it has no location.
     # assertEquals is a deprecated alias removed in Python 3.12;
     # use assertEqual.
     self.assertEqual(config.get('one', 'location'), '${HERE}')
     # file_two is a real file, so it has a location.
     file_two_loc = os.path.dirname(self.file_two)
     self.assertEqual(config.get('three', 'location'), file_two_loc)
Esempio n. 3
0
def _is_authorized_app(client_id: str, grant_type: str) -> str:
    """Validate a service's client id and grant type.

    Configuration is read from the file named by the app's
    ``SECURITY_CONF_PATH`` setting (see setting.yaml).

    :param client_id: service id
    :param grant_type: client authorization credential
    :return: client_secret — the service's secret key
    :raises ClientConfigNotFound: when the client map cannot be read
    :raises NotSupportServiceError: when the client id is unknown
    :raises GrantTypeError: when the grant type does not match
    """

    from konfig import Config
    # Map of client id -> client secret.
    c = Config(current_app.config['SECURITY_CONF_PATH'])

    try:
        clients_map = c.get_map('CLIENT_LIST')
    except Exception as err:
        # Was a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed and chained for debuggability.
        raise ClientConfigNotFound(
            "无法获取服务端 Client 配置.请研发人员核实. PATH : {}".format(
                current_app.config['SECURITY_CONF_PATH'])) from err

    secret_key = clients_map.get(client_id)

    if not secret_key:
        raise NotSupportServiceError(
            "未知的 Service ID : [{}]. 请核实!".format(client_id))

    # Constant-time comparison avoids a timing side channel.
    if not compare_digest(current_app.config['JWT_GRANT_TYPE'], grant_type):
        raise GrantTypeError(
            "unKnow `grant_type` . Plz get it from your app development. '{}' . "
            .format(grant_type))

    return secret_key
Esempio n. 4
0
 def test_convert_float(self):
     """Float-looking option values are converted to ``float``."""
     config = Config(self.file_args)
     expected = {
         'stuff': 10.3,
         'float': 9.0,
         'again': .3,
         'digits': 10.34,
         'digits2': .34,
     }
     for option, value in expected.items():
         self.assertEqual(config['floats'][option], value)
Esempio n. 5
0
 def get_settings(self):
     """Load ``src/config.ini`` into ``self.settings`` (a ``dict``).

     ``Config.as_args()`` yields a flat ``['--name', value, ...]`` list;
     adjacent pairs are unpacked and the ``--`` prefix is stripped from
     each option name.
     """
     config = Config(path.join(getcwd(), 'src', 'config.ini')).as_args()
     # A plain loop replaces the original side-effecting list
     # comprehension (an anti-pattern: the built list was discarded).
     for name, value in zip(config[::2], config[1::2]):
         self.settings[name[2:]] = value
Esempio n. 6
0
def openIni(path: str) -> Config:
    """
    Read an ini file.

    :param path: path to the ini file
    :return: the parsed ``Config`` object
    """
    return Config(path)
Esempio n. 7
0
    def init_app(app):
        """Initialise logging, the global AES helper and client credentials.

        Reads ``APP_LOG_DIR`` / ``APP_LOG_LEVEL`` from the Flask config
        (with fallbacks) and loads AES / client data from the security
        ini file named by ``SECURITY_CONF_PATH``.
        """
        # Initialise logging.
        from vanaspyhelper.LoggerManager import init_global_logger

        # Either a configured path is used, or the parent of the current
        # directory (``os.path.dirname(basedir)``) is the fallback.
        current_dir_parent = os.path.dirname(basedir)

        # dict.get with a default replaces the original `in`/index pairs.
        log_dir = app.config.get('APP_LOG_DIR', current_dir_parent)
        log_level = app.config.get('APP_LOG_LEVEL', "error")

        from konfig import Config

        # Initialise AES from the security config.
        c = Config(app.config['SECURITY_CONF_PATH'])

        global aes
        aes = AESTool(key=c.get_map('AES').get('AES_SECRET_KEY'))

        # Load client_id / client_secret into the app config.
        app.config.update(c.get_map('CLIENT_DATA'))

        # Initialise the global logger object.
        init_global_logger(log_dir, level=log_level, log_prefix="VanasRSC")
def load_into_settings(filename, settings):
    """Load config file contents into a Pyramid settings dict.

    This is a helper function for initialising a Pyramid settings dict from
    a config file.  It flattens the config file sections into dotted settings
    names and updates the given dictionary in place.

    You would typically use this when constructing a Pyramid Configurator
    object, like so::

        def main(global_config, **settings):
            config_file = global_config['__file__']
            load_into_settings(config_file, settings)
            config = Configurator(settings=settings)

    """
    filename = os.path.expandvars(os.path.expanduser(filename))
    filename = os.path.abspath(os.path.normpath(filename))
    config = Config(filename)

    # Konfig keywords are added to every section when present, we have to
    # filter them out, otherwise plugin.load_from_config and
    # plugin.load_from_settings are unable to create instances.
    konfig_keywords = ['extends', 'overrides']

    # Put values from the config file into the pyramid settings dict.
    # ``.items()`` replaces the Python-2-only ``.iteritems()``, which
    # raises AttributeError on Python 3 dicts.
    for section in config.sections():
        setting_prefix = section.replace(":", ".")
        for name, value in config.get_map(section).items():
            if name not in konfig_keywords:
                settings[setting_prefix + "." + name] = value

    # Store a reference to the Config object itself for later retrieval.
    settings['config'] = config
    return config
def load(args=None):
    """Load a dump file (``args[2]``) into the Redis cohort counters.

    :param args: argv-style list; defaults to ``sys.argv`` evaluated at
        call time.  The original default ``args=sys.argv`` was bound once
        at import time, silently ignoring later changes to ``sys.argv``.
    """
    if args is None:
        args = sys.argv
    config = Config(args[1])
    data = args[2]
    counters = RedisCohortCounters(**dict(config['redis']))

    print('Loading %r into Redis' % data)
    with open(data) as f:
        counters.load(f)
    print('Done')
Esempio n. 10
0
def initialize_app(config):
    """Configure the global ``app`` from the ini file at *config*.

    Sets up logging, request hooks, statsd, sentry, the config/schema
    readers (AWS S3 or local directory) and the cohort counter backend,
    then stores a ``SearchSettings`` on ``app.settings``.
    """
    # logging configuration
    logging.config.fileConfig(config, disable_existing_loggers=False)
    logger.info("Read configuration from %r" % config)

    app._config_file = config
    app._config = Config(config)

    app.add_hook('before_request', before_request)
    app.add_hook('after_request', after_request)

    # statsd configuration
    app._statsd = _Statsd(app._config['statsd'])

    # sentry configuration
    if app._config['sentry']['enabled']:
        app._sentry = Sentry(app._config['sentry']['dsn'])
    else:
        app._sentry = None

    # backend configuration
    configfile = app._config['absearch']['config']
    schemafile = app._config['absearch']['schema']

    if app._config['absearch']['backend'] == 'aws':
        logger.info("Read config and schema from AWS")
        config_reader = partial(get_s3_file, configfile, app._config,
                                app._statsd)
        schema_reader = partial(get_s3_file, schemafile, app._config,
                                app._statsd)
    else:
        # directory backend: read both files from disk
        datadir = app._config['directory']['path']
        logger.info("Read config and schema from %r on disk" % datadir)

        def config_reader():
            with open(os.path.join(datadir, configfile)) as f:
                data = f.read()
                # md5 requires bytes on Python 3 -- hash the encoded
                # text (matches the other initialize_app in this file).
                return (json.loads(data),
                        hashlib.md5(data.encode("utf8")).hexdigest())

        def schema_reader():
            with open(os.path.join(datadir, schemafile)) as f:
                data = f.read()
                return (json.loads(data),
                        hashlib.md5(data.encode("utf8")).hexdigest())

    # counter configuration
    counter = app._config['absearch']['counter']
    if counter == 'redis':
        counter_options = dict(app._config['redis'])
    else:
        counter_options = {}
    counter_options['statsd'] = app._statsd

    max_age = app._config['absearch']['max_age']
    app.settings = SearchSettings(config_reader, schema_reader, counter,
                                  counter_options, max_age)
def test_redis():
    """Ten increments and one decrement leave the counter at nine."""
    counter = RedisCohortCounters(**dict(Config(test_config)['redis']))

    for _ in range(10):
        counter.incr('en-US', 'US', 'abc')
    counter.decr('en-US', 'US', 'abc')

    value = counter.get('en-US', 'US', 'abc')
    assert value == 9, value
Esempio n. 12
0
 def _get_configuration(self):
     """Re-parse the ini file and return its ``loads`` section as a map.

     Loads is removing the extra information contained in the ini files,
     so the file has to be parsed again here.
     """
     config_file = self.config['config']
     if not os.path.isfile(config_file):
         # Copying the configuration files loses the config/ prefix, so
         # fall back to the bare filename in the current folder.
         config_file = os.path.basename(config_file)
     if not os.path.isfile(config_file):
         raise LookupError('Unable to locate the configuration file, aborting.')
     return Config(config_file).get_map('loads')
Esempio n. 13
0
def create():
    """Build the Flask app: load settings and register all blueprints."""
    default_settings = os.path.join(os.path.dirname(__file__), 'settings.ini')
    # The FLASK_SETTINGS environment variable overrides the default path.
    settings = os.environ.get('FLASK_SETTINGS', default_settings)

    app = Flask(__name__)
    app.config_file = Config(settings)
    app.config.update(app.config_file.get_map('flask'))

    for blueprint in blueprints:
        app.register_blueprint(blueprint['pkg'],
                               url_prefix=blueprint['prefix'])

    return app
Esempio n. 14
0
def main():
    """Validate the config file, then upload the config and schema to S3."""
    if check() != 0:
        raise ValueError("Invalid File")
    config = Config(conf)

    files = (config['absearch']['config'], config['absearch']['schema'])
    for file_ in files:
        filename = os.path.join(datadir, file_)
        print('Uploading %r' % filename)
        set_s3_file(filename, config)

    print('Done')
Esempio n. 15
0
def initialize_app(config):
    """Configure the global ``app`` from the ini file at *config*.

    Sets up logging, statsd, sentry, the config/schema readers (AWS S3
    or local directory) and the cohort counter backend, then stores a
    ``SearchSettings`` on ``app.settings``.
    """
    app._config_file = config
    app._config = Config(config)

    # logging configuration
    logging.config.fileConfig(config)

    # statsd configuration
    app._statsd = _Statsd(app._config['statsd'])

    # sentry configuration
    if app._config['sentry']['enabled']:
        app._sentry = Sentry(app._config['sentry']['dsn'])
    else:
        app._sentry = None

    # backend configuration
    configfile = app._config['absearch']['config']
    schemafile = app._config['absearch']['schema']

    if app._config['absearch']['backend'] == 'aws':
        config_reader = partial(get_s3_file, configfile, app._config,
                                app._statsd)
        schema_reader = partial(get_s3_file, schemafile, app._config,
                                app._statsd)
    else:
        # directory backend: read both files from disk
        datadir = app._config['directory']['path']

        def config_reader():
            with open(os.path.join(datadir, configfile)) as f:
                data = f.read()
                # md5 requires bytes on Python 3 -- hash the encoded
                # text (matches the other initialize_app in this file).
                return (json.loads(data),
                        hashlib.md5(data.encode("utf8")).hexdigest())

        def schema_reader():
            with open(os.path.join(datadir, schemafile)) as f:
                data = f.read()
                return (json.loads(data),
                        hashlib.md5(data.encode("utf8")).hexdigest())

    # counter configuration
    counter = app._config['absearch']['counter']
    if counter == 'redis':
        counter_options = dict(app._config['redis'])
    else:
        counter_options = {}
    counter_options['statsd'] = app._statsd

    max_age = app._config['absearch']['max_age']
    app.settings = SearchSettings(config_reader, schema_reader, counter,
                                  counter_options, max_age)
Esempio n. 16
0
def populate_S3():
    """Create the (Moto-mocked) bucket, upload data files and flush Redis."""
    config = Config(test_config)
    connector = _get_connector(config)
    connector.create_bucket(config['aws']['bucketname'])

    datadir = os.path.join(os.path.dirname(__file__), '..', '..', 'data')

    for file_ in (config['absearch']['config'], config['absearch']['schema']):
        set_s3_file(os.path.join(datadir, file_), config)

    # Start from an empty Redis database.
    redis.StrictRedis(**dict(config['redis'])).flushdb()
Esempio n. 17
0
def create_app(name=__name__, blueprints=None, settings=None):
    """Create a Flask app, optionally loading settings and blueprints."""
    app = Flask(name)

    # load configuration (FLASK_SETTINGS env var overrides the argument)
    settings = os.environ.get('FLASK_SETTINGS', settings)
    if settings is not None:
        app.config_file = Config(settings)
        app.config.update(app.config_file.get_map('flask'))

    # register blueprints
    for bp in (blueprints or []):
        app.register_blueprint(bp)

    return app
def test_redis_dump_load():
    """Counter state survives a dump() / load() round trip."""
    redis_opts = dict(Config(test_config)['redis'])
    counter = RedisCohortCounters(**redis_opts)
    counter._redis.flushdb()

    for _ in range(10):
        counter.incr('en-US', 'US', 'abc')

    dumped = list(counter.dump())

    # A fresh instance restored from the dump sees the same value.
    counter = RedisCohortCounters(**redis_opts)
    counter.load(dumped)

    value = counter.get('en-US', 'US', 'abc')
    assert value == 10, value
Esempio n. 19
0
def initialize_app(config):
    """Configure the global ``app`` from the ini file at *config*.

    Sets up logging, request hooks, sentry, the on-disk config/schema
    readers and the counter backend, then stores a ``SearchSettings``
    on ``app.settings``.
    """
    # logging configuration
    logging.config.fileConfig(config, disable_existing_loggers=False)
    logger.info("Read configuration from %r" % config)

    app._config_file = config
    app._config = Config(config)

    app.add_hook('before_request', before_request)
    app.add_hook('after_request', after_request)

    # sentry configuration
    if app._config['sentry']['enabled']:
        app._sentry = Sentry(app._config['sentry']['dsn'])
    else:
        app._sentry = None

    # backend configuration
    configfile = app._config['absearch']['config']
    schemafile = app._config['absearch']['schema']

    # directory backend: read both files from disk
    datadir = app._config['directory']['path']
    logger.info("Read config and schema from %r on disk" % datadir)

    def _disk_reader(fname):
        # Build a zero-argument reader returning (parsed JSON, md5 hex).
        def _read():
            with open(os.path.join(datadir, fname)) as f:
                data = f.read()
            return (
                json.loads(data),
                hashlib.md5(data.encode("utf8")).hexdigest(),
            )
        return _read

    config_reader = _disk_reader(configfile)
    schema_reader = _disk_reader(schemafile)

    # counter configuration
    counter = app._config['absearch']['counter']
    counter_options = {}

    max_age = app._config['absearch']['max_age']
    app.settings = SearchSettings(config_reader, schema_reader, counter,
                                  counter_options, max_age)
Esempio n. 20
0
 def get_srv_settings(self):
     """Load ``src/srv_settings.ini`` into ``self.srv_settings`` and derive
     ``self.tptp_rc_dict`` entries from it.

     ``Config.as_args()`` yields a flat ``['--name', value, ...]`` list;
     names lose their ``--`` prefix.  Each value must be an integer; it
     is zero-padded to three digits for the tptp rc dict, keyed by the
     setting name with its first two ``-``-separated parts dropped.
     """
     config = Config(path.join(getcwd(), 'src',
                               'srv_settings.ini')).as_args()
     # A plain loop replaces the original side-effecting list
     # comprehension (an anti-pattern: the built list was discarded).
     for name, value in zip(config[::2], config[1::2]):
         self.srv_settings[name[2:]] = value
     for st_name in self.srv_settings:
         try:
             self.tptp_rc_dict.update({
                 '-'.join(st_name.split('-')[2:]):
                 str(int(self.srv_settings[st_name])).zfill(3)
             })
         except ValueError:
             raise Exception(f'Wrong value in settings, line - {st_name}')
Esempio n. 21
0
def test_get_set_s3_file():
    """Round-trip a config file through the get/set S3 helpers."""
    class Stats(object):
        # Minimal statsd stand-in: timing contexts are no-ops.
        @contextmanager
        def timer(self, name):
            yield

        timed = timer

    stats = Stats()
    config = Config(test_config)
    datadir = os.path.join(os.path.dirname(__file__), '..', '..', 'data')
    datafile = os.path.join(datadir, config['absearch']['config'])

    with open(datafile) as f:
        old_data = f.read()
        # md5 requires bytes on Python 3 -- hash the encoded text
        # (matches the readers elsewhere in this file).
        old_hash = hashlib.md5(old_data.encode("utf8")).hexdigest()

    # reading the S3 bucket (that was filled with datafile)
    res, hash = get_s3_file(datafile, config, statsd=stats)
    assert res['defaultInterval'] == 31536000
    assert hash == old_hash

    # changing the file content
    res['defaultInterval'] = -1
    with open(datafile, 'w') as f:
        f.write(json.dumps(res))

    try:
        # setting the file in the bucket with the new content
        set_s3_file(datafile, config, statsd=stats)

        # getting back the new content
        res, hash = get_s3_file(config['absearch']['config'],
                                config,
                                use_cache=False,
                                statsd=stats)

        # we should see the change
        assert res['defaultInterval'] == -1
    finally:
        # restore old content
        with open(datafile, 'w') as f:
            f.write(old_data)
Esempio n. 22
0
    def test_as_args(self):
        """``as_args``/``scan_args`` flatten the config into argv-style lists."""
        config = Config(self.file_args)
        args = config.as_args(strip_prefixes=['circus'],
                              omit_sections=['bleh', 'mi', 'floats'],
                              omit_options=[('other', 'thing')])

        expected = [
            '--other-stuff', '10.3', '--httpd', '--zmq-endpoint', 'http://ok'
        ]
        # Compare as sorted copies instead of sorting in place.
        self.assertEqual(sorted(args), sorted(expected))

        args = config.as_args(omit_sections=['bleh', 'mi', 'floats'])
        expected = [
            '--circus-zmq-endpoint', 'http://ok', '--other-thing', 'bleh',
            '--other-stuff', '10.3', '--circus-httpd'
        ]
        self.assertEqual(sorted(args), sorted(expected))

        # it also works with an argparse parser
        parser = argparse.ArgumentParser(description='Run some watchers.')
        parser.add_argument('config', help='configuration file', nargs='?')

        parser.add_argument('-L', '--log-level', dest='loglevel')
        parser.add_argument('--log-output', dest='logoutput')
        parser.add_argument('--daemon', dest='daemonize', action='store_true')
        parser.add_argument('--pidfile', dest='pidfile')
        parser.add_argument('--multi', action='append')

        args = config.scan_args(parser, strip_prefixes=['mi'])

        expected = [
            '--log-level', u'DEBUG', '--log-output', u'stdout', '--daemon',
            '--pidfile', u'pid', '--multi', 'one', '--multi', 'two', '--multi',
            'three'
        ]
        self.assertEqual(sorted(expected), sorted(args))
Esempio n. 23
0
def main():
    """Build the kompost site: read the ini file, generate pages and PDFs."""
    configure_logger()
    # Config file defaults to kompost.ini unless given on the command line.
    ini = sys.argv[1] if len(sys.argv) > 1 else "kompost.ini"
    os.environ['CURDIR'] = os.getcwd()
    config = dict(Config(codecs.open(ini, "r", "utf8")).items('kompost'))
    target = config['target']
    src = config['src']
    socket.setdefaulttimeout(int(config.get('timeout', 10)))
    config.update({
        'media': os.path.abspath(os.path.join(target, 'media')),
        'generic': os.path.join(src, 'generic.mako'),
        'cats': os.path.join(src, 'category.mako'),
        'icons': ('pen.png', 'info.png', 'thumbsup.png', 'right.png',
                  'flash.png'),
        'metadata': os.path.join(target, 'metadata.json'),
    })
    generate(config)
    pdf(config)
Esempio n. 24
0
def create_app():
    """
    Create the app object and return it
    """
    app = Flask(__name__)

    # Load application settings
    settings = os.environ.get("FLASK_SETTINGS", SETTINGS)
    if settings is not None:
        c = Config(settings)
        # (leftover debug ``print(c)`` removed)
        app.config.update(c.get_map('flask'))

    from users.views import user
    # Register the blueprints to app
    app.register_blueprint(user)

    db.init_app(app)

    return app
Esempio n. 25
0
def main(args=None):
    """Entry point: read options and config, drain Redis events, poke Balrog."""
    # 1. read the command-line options
    args = _read_args(args)

    if args.version:
        print(__version__)
        sys.exit(0)

    # 2. grab the config
    from konfig import Config
    udun_cfg = Config(args.config).get_map('udun')

    # 3. grab things in the redis list
    host = udun_cfg.get('redis_host', 'localhost')
    port = udun_cfg.get('redis_port', 6379)
    listname = udun_cfg.get('redis_listname', 'udun')
    events = _get_redis_events(host, port, listname)

    # 4. combine per-collection
    collection_ids = _get_impacted_collections(events)

    # 5. send it to Balrog
    _poke_balrog(collection_ids)
Esempio n. 26
0
def create_app(name=__name__,
               blueprints=None,
               settings=None,
               template_folder=None,
               static_folder=None):
    """Build a Flask app with error handlers, blueprints and the doc pages."""
    app = Flask(name,
                template_folder=template_folder,
                static_folder=static_folder)

    # load configuration (FLASK_SETTINGS env var overrides the argument)
    settings = os.environ.get('FLASK_SETTINGS', settings)
    if settings is not None:
        app.config_file = Config(settings)
        app.config.update(app.config_file.get_map('flask'))

    app.register_error_handler(404, page_not_found)
    app.register_error_handler(405, method_not_allowed)

    # register blueprints, giving each one a back-reference to the app
    for bp in (blueprints or []):
        app.register_blueprint(bp)
        bp.app = app
    app.register_blueprint(doc)

    return app
Esempio n. 27
0
    def test_reader(self):
        """Values are converted, maps are exposed, env values are frozen.

        Uses ``assertEqual``: ``assertEquals`` is a deprecated alias
        removed in Python 3.12.
        """
        config = Config(self.file_one)

        # values conversion
        self.assertEqual(config.get('one', 'foo'), 'bar')
        self.assertEqual(config.get('one', 'num'), -12)
        self.assertEqual(config.get('one', 'not_a_num'), "12abc")
        self.assertEqual(config.get('one', 'st'), 'o=k')
        self.assertEqual(config.get('one', 'lines'), [1, 'two', 3])
        self.assertEqual(config.get('one', 'env'), 'some stuff')

        # getting a map (``mapping`` avoids shadowing the builtin ``map``)
        mapping = config.get_map()
        self.assertEqual(mapping['one.foo'], 'bar')

        mapping = config.get_map('one')
        self.assertEqual(mapping['foo'], 'bar')

        # env values were resolved at load time, not on access
        del os.environ['__STUFF__']
        self.assertEqual(config.get('one', 'env'), 'some stuff')

        # extends
        self.assertEqual(config.get('three', 'more'), 'stuff')
        self.assertEqual(config.get('one', 'two'), 'a')
Esempio n. 28
0
def config():
    """Return a ``Config`` for the file named by ``ICHNAEA_CFG``.

    Falls back to ``ichnaea.ini`` when the variable is not set.
    """
    ini_path = os.environ.get('ICHNAEA_CFG', 'ichnaea.ini')
    return Config(ini_path)
Esempio n. 29
0
def _parse(sysargs=None):
    """Build the loads argument parser, parse *sysargs* (default
    ``sys.argv[1:]``) and return ``(args, parser)``.

    When ``--config`` is given, the config file is scanned for option
    values and the command line is re-parsed with those appended, so
    explicit CLI flags keep precedence over the config file.
    """
    if sysargs is None:
        sysargs = sys.argv[1:]

    parser = argparse.ArgumentParser(description='Runs a load test.')
    parser.add_argument('fqn',
                        help='Fully Qualified Name of the test',
                        nargs='?')

    parser.add_argument('--config',
                        help='Configuration file to read',
                        type=str,
                        default=None)

    parser.add_argument('-u',
                        '--users',
                        help='Number of virtual users',
                        type=str,
                        default='1')

    parser.add_argument('--test-dir',
                        help='Directory to run the test from',
                        type=str,
                        default=None)

    parser.add_argument('--python-dep',
                        help='Python (PyPI) dependencies '
                        'to install',
                        action='append',
                        default=[])

    parser.add_argument('--include-file',
                        help='File(s) to include (needed for the test) '
                        '- glob-style',
                        action='append',
                        default=[])

    parser.add_argument('--ssh',
                        help='SSH tunnel - e.g. user@server:port',
                        type=str,
                        default=None)

    # loads works with hits or duration
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--hits',
                       help='Number of hits per user',
                       type=str,
                       default=None)
    group.add_argument('-d',
                       '--duration',
                       help='Duration of the test (s)',
                       type=int,
                       default=None)

    parser.add_argument('--version',
                        action='store_true',
                        default=False,
                        help='Displays Loads version and exits.')

    parser.add_argument('--test-runner',
                        default=None,
                        help='The path to binary to use as the test runner '
                        'when in distributed mode. The default is '
                        'this (python) runner')

    parser.add_argument('--server-url',
                        default=None,
                        help='The URL of the server you want to test. It '
                        'will override any value your provided in '
                        'the tests for the WebTest client.')

    parser.add_argument('--observer',
                        action='append',
                        choices=[observer.name for observer in observers],
                        help='Callable that will receive the final results. '
                        'Only in distributed mode (runs on the broker)')

    #
    # Loading observers options
    #
    # Each registered observer contributes its own --observer-<name>-<opt>
    # flags, built from the observer's declared option metadata.
    for observer in observers:
        prefix = '--observer-%s-' % observer.name
        for option in observer.options:
            name = prefix + option['name']
            parser.add_argument(name,
                                help=option.get('help'),
                                default=option.get('default'),
                                type=option.get('type'),
                                action=option.get('action'))

    parser.add_argument('--no-patching',
                        help='Deactivate Gevent monkey patching.',
                        action='store_true',
                        default=False)

    parser.add_argument('--project-name', help='Project name.', default='N/A')

    #
    # distributed options
    #
    parser.add_argument('-a',
                        '--agents',
                        help='Number of agents to use.',
                        type=int)

    parser.add_argument('--zmq-receiver',
                        default=None,
                        help=('ZMQ socket where the runners send the events to'
                              ' (opened on the agent side).'))

    parser.add_argument('--zmq-publisher',
                        default=DEFAULT_PUBLISHER,
                        help='ZMQ socket where the test results messages '
                        'are published.')

    parser.add_argument('--ping-broker',
                        action='store_true',
                        default=False,
                        help='Pings the broker to get info, display it and '
                        'exits.')

    parser.add_argument('--check-cluster',
                        action='store_true',
                        default=False,
                        help='Runs a test on all agents then exits.')

    parser.add_argument('--purge-broker',
                        action='store_true',
                        default=False,
                        help='Stops all runs on the broker and exits.')

    parser.add_argument('-b',
                        '--broker',
                        help='Broker endpoint',
                        default=DEFAULT_FRONTEND)

    parser.add_argument('--user-id',
                        help='Name of the user who runs the test',
                        type=str,
                        default='undefined')

    outputs = [st.name for st in output_list()]
    outputs.sort()

    parser.add_argument('--batched',
                        action='store_true',
                        default=False,
                        help='Batch results in distributed mode')

    parser.add_argument('--quiet',
                        action='store_true',
                        default=False,
                        help='Do not print any log messages.')
    parser.add_argument('--output',
                        action='append',
                        default=['stdout'],
                        help='The output which will get the results',
                        choices=outputs)

    parser.add_argument('--attach',
                        help='Reattach to a distributed run',
                        action='store_true',
                        default=False)

    parser.add_argument('--detach',
                        help='Detach immediatly the current '
                        'distributed run',
                        action='store_true',
                        default=False)

    # Adds the per-output and per-runner options.
    add_options(RUNNERS, parser, fmt='--{name}-{option}')
    add_options(output_list(), parser, fmt='--output-{name}-{option}')

    args = parser.parse_args(sysargs)
    if args.config is not None:
        # second pass !
        # Values found in the config file are appended AFTER sysargs, and
        # argparse keeps the first occurrence's precedence rules, so CLI
        # flags still win; the config may also supply the positional fqn.
        config = Config(args.config)
        config_args = config.scan_args(parser, strip_prefixes=['loads'])
        if 'fqn' in config['loads']:
            config_args += [config['loads']['fqn']]
        args = parser.parse_args(args=sysargs + config_args)

    if args.quiet and 'stdout' in args.output:
        # --quiet suppresses the default stdout output.
        args.output.remove('stdout')

    return args, parser
Esempio n. 30
0
from flask import Flask, render_template, request, url_for, session, redirect, flash
from flask_bcrypt import Bcrypt
import os
from flask_pymongo import PyMongo
import requests
import urllib.parse
#끌어오기
import opgg_crawling
from time import sleep  #받아오기 속도조절
from konfig import Config
# Module-level setup: read ./conf.ini and wire up Flask, Mongo and bcrypt.
cc = Config("./conf.ini")

# Section maps from the ini file: API keys, app settings, DB settings.
api_conf = cc.get_map("api")
app_conf = cc.get_map("app")
db_conf = cc.get_map("db")
app = Flask(__name__)
# Original comment says the DB URI and password come from environment
# variables, but the values are read from conf.ini above.
# NOTE(review): confirm whether conf.ini itself interpolates env vars.
app.config['SECRET_KEY'] = app_conf['SECRET_KEY']
app.config['MONGO_URI'] = db_conf['MONGO_URI']
apikey = api_conf['LOL_API_KEY']

mongo = PyMongo(app)
bcrypt = Bcrypt(app)

@app.route('/')
def index(name=None):

    tip_List = mongo.db.tip_List
    get_tips = tip_List.find().sort([['_id', -1]]).limit(10)
    tip_lists = []