Example #1
def init(**options):

    configure_logging(json=options['json'], verbose=options['verbose'])

    connection = Connection(host=options['mysql_host'],
                            port=options['mysql_port'],
                            user=options['mysql_user'],
                            password=options['mysql_password'],
                            charset='utf8mb4',
                            cursorclass=pymysql.cursors.DictCursor)

    create_db(connection, options['mysql_database'])

    connection_pool = PooledDB(
        creator=pymysql,
        mincached=1,
        maxcached=10,
        # max connections currently in use - doesn't
        # include cached connections
        maxconnections=50,
        blocking=True,
        host=options['mysql_host'],
        port=options['mysql_port'],
        user=options['mysql_user'],
        password=options['mysql_password'],
        database=options['mysql_database'],
        charset='utf8mb4',
        cursorclass=pymysql.cursors.DictCursor)
    shh_dao = ShhDao(connection_pool)

    shh_dao.create_secret_table()
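
These **options handlers are typically invoked as click commands, with each command-line option passed through as a keyword argument. A minimal sketch of how the MySQL options above might be declared is shown below; the option names, defaults and flags are assumptions for illustration, not taken from the original project.

# Hypothetical click wiring for the init command above (assumed, not from the project).
import click

@click.command()
@click.option('--mysql-host', default='localhost')
@click.option('--mysql-port', default=3306, type=int)
@click.option('--mysql-user', required=True)
@click.option('--mysql-password', required=True)
@click.option('--mysql-database', required=True)
@click.option('--json', is_flag=True)
@click.option('--verbose', is_flag=True)
def init(**options):
    # click passes each option as a keyword argument, e.g. options['mysql_host'].
    ...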
Example #2
File: main.py  Project: braedon/bottler
def server(**options):

    def shutdown():
        global SERVER_READY
        SERVER_READY = False

        def wait():
            # Sleep for a few seconds to allow for race conditions between sending
            # the SIGTERM and load balancers stopping sending traffic here.
            log.info('Shutdown: Sleeping %(sleep_s)s seconds.',
                     {'sleep_s': options['shutdown_sleep']})
            time.sleep(options['shutdown_sleep'])

            log.info('Shutdown: Waiting up to %(wait_s)s seconds for connections to close.',
                     {'wait_s': options['shutdown_wait']})
            gevent_pool.join(timeout=options['shutdown_wait'])

            log.info('Shutdown: Exiting.')
            sys.exit()

        # Run in a greenlet, as we can't block in a signal handler.
        gevent.spawn(wait)

    configure_logging(json=options['json'], verbose=options['verbose'])

    app = construct_app(**options)
    app = wsgi_log_middleware(app)

    with nice_shutdown(shutdown=shutdown):
        bottle.run(app,
                   host='0.0.0.0', port=options['port'],
                   server='gevent', spawn=gevent_pool,
                   # Disable default request logging - we're using middleware
                   quiet=True, error_log=None)
Example #3
def server(**options):
    def shutdown():
        shh.SERVER_READY = False

        def wait():
            # Sleep for a few seconds to allow for race conditions between sending
            # the SIGTERM and load balancers stopping sending traffic here.
            log.info('Shutdown: Sleeping %(sleep_s)s seconds.',
                     {'sleep_s': options['shutdown_sleep']})
            time.sleep(options['shutdown_sleep'])

            log.info(
                'Shutdown: Waiting up to %(wait_s)s seconds for connections to close.',
                {'wait_s': options['shutdown_wait']})
            gevent_pool.join(timeout=options['shutdown_wait'])

            log.info('Shutdown: Exiting.')
            sys.exit()

        # Run in a greenlet, as we can't block in a signal handler.
        gevent.spawn(wait)

    configure_logging(json=options['json'], verbose=options['verbose'])

    connection_pool = PooledDB(
        creator=pymysql,
        mincached=1,
        maxcached=10,
        # max connections currently in use - doesn't
        # include cached connections
        maxconnections=50,
        blocking=True,
        host=options['mysql_host'],
        port=options['mysql_port'],
        user=options['mysql_user'],
        password=options['mysql_password'],
        database=options['mysql_database'],
        charset='utf8mb4',
        cursorclass=pymysql.cursors.DictCursor)
    shh_dao = ShhDao(connection_pool)

    with options['oidc_public_key_file'] as file:
        public_key = file.read()
    token_decoder = TokenDecoder(public_key, options['oidc_iss'],
                                 options['oidc_client_id'])

    app = construct_app(shh_dao, token_decoder, **options)
    app = wsgi_log_middleware(app)

    with nice_shutdown(shutdown):
        bottle.run(
            app,
            host='0.0.0.0',
            port=options['port'],
            server='gevent',
            spawn=gevent_pool,
            # Disable default request logging - we're using middleware
            quiet=True,
            error_log=None)
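
nice_shutdown is a helper from the project itself and its implementation isn't shown in these examples. A minimal sketch of what such a context manager could look like, assuming it simply installs SIGTERM/SIGINT handlers that call the optional shutdown callback, is given below; the real helper may do more.

# Sketch of a nice_shutdown-style context manager (an assumption, not the project's actual code).
import signal
from contextlib import contextmanager

@contextmanager
def nice_shutdown(shutdown=None):
    def handler(signum, frame):
        if shutdown:
            shutdown()

    # Install the handlers for the duration of the block, restoring the old ones afterwards.
    old_term = signal.signal(signal.SIGTERM, handler)
    old_int = signal.signal(signal.SIGINT, handler)
    try:
        yield
    finally:
        signal.signal(signal.SIGTERM, old_term)
        signal.signal(signal.SIGINT, old_int)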
Example #4
def twitter_worker(**options):

    configure_logging(json=options['json'], verbose=options['verbose'])

    es_client = Elasticsearch(options['es_node'], verify_certs=False)
    es_dao = GpcSupDao(es_client, options['es_site_index'],
                       options['es_resource_index'])

    with nice_shutdown():
        run_twitter_worker(es_dao, **options)
Example #5
def create_merged(input_dir="data_raw"):
    '''Do all steps up to merge'''
    configure_logging()

    logging.info("Starting preprocessing of MIMIC data")

    df_items = load_chartevents(input_dir=input_dir, output_file="data_processed/mimic_chartevents.csv")
    df_day = create_day_blocks(df_items, output_file="data_processed/mimic_day_blocks.csv")

    df_day = add_derived_columns(df_day)

    df_patient = get_admission_details(input_dir=input_dir, output_file="data_processed/mimic_admission_info.csv")

    df_proc = get_all_procedures(input_dir=input_dir, output_file="data_processed/mimic_procedures.csv")

    df_diagnoses = get_all_diagnoses(input_dir=input_dir, output_file="data_processed/mimic_diagnoses.csv")

    return get_merged_data(df_day, df_patient, df_proc, df_diagnoses, output_file="data_processed/mimic_processed.csv")
Example #6
def worker(**options):

    configure_logging(json=options['json'], verbose=options['verbose'])

    connection_pool = PooledDB(
        creator=pymysql,
        mincached=1,
        maxcached=1,
        # max connections currently in use - doesn't
        # include cached connections
        maxconnections=1,
        blocking=True,
        host=options['mysql_host'],
        port=options['mysql_port'],
        user=options['mysql_user'],
        password=options['mysql_password'],
        database=options['mysql_database'],
        charset='utf8mb4',
        cursorclass=pymysql.cursors.DictCursor)
    shh_dao = ShhDao(connection_pool)

    with nice_shutdown():
        run_worker(shh_dao, **options)
Example #7
def main(**options):

    def shutdown():
        kong_log_bridge.SERVER_READY = False

        def wait():
            # Sleep for a few seconds to allow for race conditions between sending
            # the SIGTERM and load balancers stopping sending traffic here.
            log.info('Shutdown: Sleeping %(sleep_s)s seconds.',
                     {'sleep_s': options['shutdown_sleep']})
            time.sleep(options['shutdown_sleep'])

            log.info('Shutdown: Waiting up to %(wait_s)s seconds for connections to close.',
                     {'wait_s': options['shutdown_wait']})
            gevent_pool.join(timeout=options['shutdown_wait'])

            log.info('Shutdown: Exiting.')
            sys.exit()

        # Run in a greenlet, as we can't block in a signal handler.
        gevent.spawn(wait)

    configure_logging(json=options['json'], verbose=options['verbose'],
                      log_level=options['log_level'])

    # Elasticsearch logs all requests at (at least) INFO level. Quieten its logger to WARNING unless the log level is DEBUG.
    if not (options['log_level'] == 'DEBUG' or options['verbose']):
        logging.getLogger('elasticsearch').setLevel(logging.WARNING)

    if options['es_basic_user'] and not options['es_basic_password']:
        raise click.BadOptionUsage('es_basic_user', 'Username provided with no password.')
    elif not options['es_basic_user'] and options['es_basic_password']:
        raise click.BadOptionUsage('es_basic_password', 'Password provided with no username.')
    elif options['es_basic_user']:
        http_auth = (options['es_basic_user'], options['es_basic_password'])
    else:
        http_auth = None

    if not options['es_ca_certs'] and options['es_client_cert']:
        raise click.BadOptionUsage('es_client_cert', '--es-client-cert can only be used when --es-ca-certs is provided.')
    elif not options['es_ca_certs'] and options['es_client_key']:
        raise click.BadOptionUsage('es_client_key', '--es-client-key can only be used when --es-ca-certs is provided.')
    elif options['es_client_cert'] and not options['es_client_key']:
        raise click.BadOptionUsage('es_client_cert', '--es-client-key must be provided when --es-client-cert is used.')
    elif not options['es_client_cert'] and options['es_client_key']:
        raise click.BadOptionUsage('es_client_key', '--es-client-cert must be provided when --es-client-key is used.')

    if options['es_ca_certs']:
        es_client = Elasticsearch(options['es_node'],
                                  verify_certs=True,
                                  ca_certs=options['es_ca_certs'],
                                  client_cert=options['es_client_cert'],
                                  client_key=options['es_client_key'],
                                  http_auth=http_auth,
                                  maxsize=options['es_max_connections'])
    else:
        es_client = Elasticsearch(options['es_node'],
                                  verify_certs=False,
                                  http_auth=http_auth,
                                  maxsize=options['es_max_connections'])

    app = construct_app(es_client, **options)
    app = wsgi_log_middleware(app)

    with nice_shutdown(shutdown):
        bottle.run(app,
                   host='0.0.0.0', port=options['port'],
                   server='gevent', spawn=gevent_pool,
                   # Disable default request logging - we're using middleware
                   quiet=True, error_log=None)
Example #8
from utils.logging import configure_logging
from fastapi import FastAPI, Path, HTTPException
from finders import finders
from utils.exceptions import NotFoundException
import uvicorn
import logging
import asyncio

configure_logging()

app = FastAPI()


@app.get('/finders', summary='List all finders')
def list_finders():
  finders_list = []
  for key, value in finders.items():
    finders_list.append(key)
  return finders_list


@app.get('/all/{name}', summary='Search all finders')
async def get_all(name: str = Path(..., title='Company name to search')):
  name = name.replace('+', ' ')
  all_finders = list_finders()
  results = {}

  async def get_one(finder: str):
    try:
      logging.info(f'looking in {finder} for {name}')
      result = await finders[finder](name)