Ejemplo n.º 1
0
import gevent
import time
import gevent.monkey
import slow_memcached_server
# Patch only time.sleep; libmc's C-level socket I/O is made cooperative
# by greenify below, so a full monkey-patch is not needed here.
gevent.monkey.patch_time()

import greenify
greenify.greenify()  # enable greenify before patching any library
import libmc
# Patch libmc's compiled extension so its blocking calls yield to gevent.
assert greenify.patch_lib(libmc._client.__file__)
mc = libmc.Client(["127.0.0.1:%d" % slow_memcached_server.PORT])
# Poll timeout is in milliseconds; allow 2x the server's artificial delay.
mc.config(libmc._client.MC_POLL_TIMEOUT,
          slow_memcached_server.BLOCKING_SECONDS * 1000 * 2)  # ms


# Event markers recorded by the greenlet functions below.
stack = []


def mc_sleep():
    """Block this greenlet on a memcached `set` against the slow server.

    Pushes begin/end markers onto the module-level `stack` so the caller
    can verify how greenlets interleaved.
    """
    # Parenthesized single-argument print is behavior-identical in
    # Python 2 and also valid in Python 3 (the bare statement is not).
    print('begin mc sleep')
    stack.append('mc_sleep_begin')
    assert mc.set('foo', 'bar'), "Run `python slow_memcached_server.py` first"
    stack.append('mc_sleep_end')
    print('end mc sleep')


def singer():
    """Print the jingle-bells refrain six times, numbered [1] through [6]."""
    # range(1, 7) replaces the original dead `i = 0` plus in-loop
    # increment; print() calls are valid in Python 2 and 3 alike.
    for i in range(1, 7):
        print('[%d] Oh, jingle bells, jingle bells, Jingle all the way.' % i)
Ejemplo n.º 2
0
def run(base_dir, start_gunicorn_app=True, options=None):
    """Bootstrap a Zato server from the configuration under ``base_dir``.

    Order matters throughout: the pidfile is stored first, logging is
    configured before anything logs, greenification happens right after
    server.conf is read, and the Gunicorn app is started (or returned) last.

    base_dir -- server directory; becomes the process's working directory
    start_gunicorn_app -- if True, run the Gunicorn app; otherwise return
        the underlying WSGI application object
    options -- optional dict; ``secret_key`` and ``sync_internal`` are
        accessed as required keys, ``fg`` is read via .get
    """
    options = options or {}

    # Store a pidfile before doing anything else
    store_pidfile(base_dir)

    # For dumping stacktraces
    register_diag_handlers()

    # Capture warnings to log files
    logging.captureWarnings(True)

    # Start initializing the server now
    os.chdir(base_dir)

    # Use pymysql as a drop-in MySQLdb replacement when it is available.
    try:
        import pymysql
        pymysql.install_as_MySQLdb()
    except ImportError:
        pass

    # We're doing it here even if someone doesn't use PostgreSQL at all
    # so we're not surprised when someone suddenly starts using PG.
    # TODO: Make sure it's registered for each of the subprocess
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

    # We know we don't need warnings because users may explicitly configure no certificate validation.
    # We don't want for urllib3 to warn us about it.
    import requests as _r
    _r.packages.urllib3.disable_warnings()

    repo_location = os.path.join(base_dir, 'config', 'repo')

    # Configure the logging first, before configuring the actual server.
    logging.addLevelName('TRACE1', TRACE1)
    logging_conf_path = os.path.join(repo_location, 'logging.conf')

    with open(logging_conf_path) as f:
        # NOTE(review): yaml.load without an explicit Loader falls back to
        # a potentially unsafe loader on older PyYAML versions — confirm
        # whether yaml.safe_load would suffice for logging.conf.
        logging_config = yaml.load(f)
        dictConfig(logging_config)

    logger = logging.getLogger(__name__)
    kvdb_logger = logging.getLogger('zato_kvdb')

    crypto_manager = ServerCryptoManager(repo_location,
                                         secret_key=options['secret_key'],
                                         stdin_data=read_stdin_data())
    secrets_config = ConfigObj(os.path.join(repo_location, 'secrets.conf'),
                               use_zato=False)
    server_config = get_config(repo_location,
                               'server.conf',
                               crypto_manager=crypto_manager,
                               secrets_conf=secrets_config)
    pickup_config = get_config(repo_location, 'pickup.conf')
    sio_config = get_config(repo_location,
                            'simple-io.conf',
                            needs_user_config=False)
    sso_config = get_config(repo_location, 'sso.conf', needs_user_config=False)
    normalize_sso_config(sso_config)

    # Now that we have access to server.conf, greenify libraries required to be made greenlet-friendly,
    # assuming that there are any - otherwise do not do anything.
    to_greenify = []
    for key, value in server_config.get('greenify', {}).items():
        if asbool(value):
            if not os.path.exists(key):
                raise ValueError('No such path `{}`'.format(key))
            else:
                to_greenify.append(key)

    # Go ahead only if we actually have anything to greenify
    if to_greenify:
        import greenify
        greenify.greenify()
        for name in to_greenify:
            result = greenify.patch_lib(name)
            if not result:
                raise ValueError(
                    'Library `{}` could not be greenified'.format(name))
            else:
                logger.info('Greenified library `%s`', name)

    server_config.main.token = server_config.main.token.encode('utf8')

    # Do not proceed unless we can be certain our own preferred address or IP can be obtained.
    preferred_address = server_config.preferred_address.get('address')

    if not preferred_address:
        preferred_address = get_preferred_ip(server_config.main.gunicorn_bind,
                                             server_config.preferred_address)

    if not preferred_address and not server_config.server_to_server.boot_if_preferred_not_found:
        msg = 'Unable to start the server. Could not obtain a preferred address, please configure [bind_options] in server.conf'
        logger.warn(msg)
        raise Exception(msg)

    # Create the startup callable tool as soon as practical
    startup_callable_tool = StartupCallableTool(server_config)

    # Run the hook before there is any server object created
    startup_callable_tool.invoke(SERVER_STARTUP.PHASE.FS_CONFIG_ONLY,
                                 kwargs={
                                     'server_config': server_config,
                                     'pickup_config': pickup_config,
                                     'sio_config': sio_config,
                                     'sso_config': sso_config,
                                 })

    # New in 2.0 - Start monitoring as soon as possible
    if server_config.get('newrelic', {}).get('config'):
        import newrelic.agent
        newrelic.agent.initialize(server_config.newrelic.config,
                                  server_config.newrelic.environment or None,
                                  server_config.newrelic.ignore_errors or None,
                                  server_config.newrelic.log_file or None,
                                  server_config.newrelic.log_level or None)

    zunicorn.SERVER_SOFTWARE = server_config.misc.get('http_server_header',
                                                      'Zato')

    # Store KVDB config in logs, possibly replacing its password if told to
    kvdb_config = get_kvdb_config_for_log(server_config.kvdb)
    kvdb_logger.info('Main process config `%s`', kvdb_config)

    # New in 2.0 hence optional
    user_locale = server_config.misc.get('locale', None)
    if user_locale:
        locale.setlocale(locale.LC_ALL, user_locale)
        # Log a sample currency amount so the effective locale is visible.
        value = 12345
        logger.info('Locale is `%s`, amount of %s -> `%s`', user_locale, value,
                    locale.currency(value, grouping=True).decode('utf-8'))

    # Makes queries against Postgres asynchronous
    if asbool(server_config.odb.use_async_driver
              ) and server_config.odb.engine == 'postgresql':
        make_psycopg_green()

    if server_config.misc.http_proxy:
        os.environ['http_proxy'] = server_config.misc.http_proxy

    # Basic components needed for the server to boot up
    kvdb = KVDB()
    odb_manager = ODBManager(well_known_data=ZATO_CRYPTO_WELL_KNOWN_DATA)
    sql_pool_store = PoolStore()

    service_store = ServiceStore()
    service_store.odb = odb_manager
    service_store.services = {}

    server = ParallelServer()
    server.odb = odb_manager
    server.service_store = service_store
    server.service_store.server = server
    server.sql_pool_store = sql_pool_store
    server.service_modules = []
    server.kvdb = kvdb

    # Assigned here because it is a circular dependency
    odb_manager.parallel_server = server

    zato_gunicorn_app = ZatoGunicornApplication(server, repo_location,
                                                server_config.main,
                                                server_config.crypto)

    server.has_fg = options.get('fg')
    server.crypto_manager = crypto_manager
    server.odb_data = server_config.odb
    server.host = zato_gunicorn_app.zato_host
    server.port = zato_gunicorn_app.zato_port
    server.repo_location = repo_location
    server.user_conf_location = os.path.join(server.repo_location, 'user-conf')
    server.base_dir = base_dir
    server.logs_dir = os.path.join(server.base_dir, 'logs')
    server.tls_dir = os.path.join(server.base_dir, 'config', 'repo', 'tls')
    server.static_dir = os.path.join(server.base_dir, 'config', 'repo',
                                     'static')
    server.json_schema_dir = os.path.join(server.base_dir, 'config', 'repo',
                                          'schema', 'json')
    server.fs_server_config = server_config
    server.fs_sql_config = get_config(repo_location,
                                      'sql.conf',
                                      needs_user_config=False)
    server.pickup_config = pickup_config
    server.logging_config = logging_config
    server.logging_conf_path = logging_conf_path
    server.sio_config = sio_config
    server.sso_config = sso_config
    server.user_config.update(server_config.user_config_items)
    server.preferred_address = preferred_address
    server.sync_internal = options['sync_internal']
    server.jwt_secret = server.fs_server_config.misc.jwt_secret.encode('utf8')
    server.startup_callable_tool = startup_callable_tool
    server.is_sso_enabled = server.fs_server_config.component_enabled.sso
    if server.is_sso_enabled:
        server.sso_api = SSOAPI(server, sso_config, None,
                                crypto_manager.encrypt, crypto_manager.decrypt,
                                crypto_manager.hash_secret,
                                crypto_manager.verify_hash, new_user_id)

    # Remove all locks possibly left over by previous server instances
    kvdb.component = 'master-proc'
    clear_locks(kvdb, server_config.main.token, server_config.kvdb,
                crypto_manager.decrypt)

    # New in 2.0.8
    server.return_tracebacks = asbool(
        server_config.misc.get('return_tracebacks', True))
    server.default_error_message = server_config.misc.get(
        'default_error_message', 'An error has occurred')

    # Turn the repo dir into an actual repository and commit any new/modified files
    RepoManager(repo_location).ensure_repo_consistency()

    # New in 2.0 so it's optional.
    profiler_enabled = server_config.get('profiler', {}).get('enabled', False)

    # New in 2.0 so it's optional.
    sentry_config = server_config.get('sentry')

    # NOTE(review): if server.conf has no [sentry] section, sentry_config
    # is None here and .pop raises AttributeError — confirm the section is
    # always present.
    dsn = sentry_config.pop('dsn', None)
    if dsn:

        from raven import Client
        from raven.handlers.logging import SentryHandler

        handler_level = sentry_config.pop('level')
        client = Client(dsn, **sentry_config)

        handler = SentryHandler(client=client)
        handler.setLevel(getattr(logging, handler_level))

        logger = logging.getLogger('')
        logger.addHandler(handler)

        # Attach the Sentry handler to every zato* logger as well.
        for name in logging.Logger.manager.loggerDict:
            if name.startswith('zato'):
                logger = logging.getLogger(name)
                logger.addHandler(handler)

    if asbool(profiler_enabled):
        profiler_dir = os.path.abspath(
            os.path.join(base_dir, server_config.profiler.profiler_dir))
        server.on_wsgi_request = ProfileMiddleware(
            server.on_wsgi_request,
            log_filename=os.path.join(profiler_dir,
                                      server_config.profiler.log_filename),
            cachegrind_filename=os.path.join(
                profiler_dir, server_config.profiler.cachegrind_filename),
            discard_first_request=server_config.profiler.discard_first_request,
            flush_at_shutdown=server_config.profiler.flush_at_shutdown,
            path=server_config.profiler.url_path,
            unwind=server_config.profiler.unwind)

    # New in 2.0 - set environment variables for servers to inherit
    os_environ = server_config.get('os_environ', {})
    for key, value in os_environ.items():
        os.environ[key] = value

    # Run the hook right before the Gunicorn-level server actually starts
    startup_callable_tool.invoke(SERVER_STARTUP.PHASE.IMPL_BEFORE_RUN,
                                 kwargs={
                                     'zato_gunicorn_app': zato_gunicorn_app,
                                 })

    # Run the app at last
    if start_gunicorn_app:
        zato_gunicorn_app.run()
    else:
        return zato_gunicorn_app.zato_wsgi_app
Ejemplo n.º 3
0
import logging
import time
import datetime
from collections import deque
from functools import wraps
import itertools
import torndb
import tornado.ioloop
from torndb import Row
from tornado.gen import coroutine, Return
from tornado import locks
from functools import partial
from tornado.options import options

# NOTE(review): `greenify` is used below but never imported in this
# snippet — as shown, the next line raises NameError. Presumably
# `import greenify` and `greenify.greenify()` belong above; confirm
# against the original module.
assert greenify.patch_lib("/usr/lib64/mysql/libmysqlclient_r.so")
# Tornado coroutine lock; what it guards is not visible in this chunk.
mutex = locks.Lock()


def enable_debug():
    import inspect
    import greenlet
    import logging

    def trace_green(event, args):
        src, target = args
        if event == "switch":
            logging.info("from %s switch to %s" % (src, target))
        elif event == "throw":
            logging.info("from %s throw exception to %s" % (src, target))
        if src.gr_frame:
Ejemplo n.º 4
0
from __future__ import absolute_import
import sys
import time
import greenify
greenify.greenify()  # enable greenify before patching any library
import pylibmc
from tornado.ioloop import IOLoop
from tornado.gen import coroutine
from tornado.web import RequestHandler, Application
from gtornado import green

# Make libmemcached's blocking socket calls cooperative.
# NOTE(review): unlike sibling examples, the return value is not
# asserted here, so a failed patch would go unnoticed.
greenify.patch_lib("/usr/lib/x86_64-linux-gnu/libmemcached.so")


class MCPool(green.Pool):
    """Pool of raw pylibmc client connections (size fixed at construction)."""

    def create_raw_conn(self):
        # Each pool slot gets its own standalone client to localhost.
        servers = ["localhost"]
        return pylibmc.Client(servers)


mcpool = MCPool(200)

def call_mc(i):
    """Borrow a pooled memcached client, store a timestamp, return stats.

    `i` is an unused task index, kept so callers mapping over a range
    keep working unchanged.
    """
    conn = None
    try:
        conn = mcpool.get_conn()
        conn.set("timestamp", str(time.time()))
        return conn.get_stats()
    finally:
        # The original released `mc` unconditionally; if get_conn() raised,
        # the name was unbound and the finally block itself raised
        # NameError, masking the real error. Release only what we got.
        if conn is not None:
            mcpool.release(conn)

class Hello(RequestHandler):
    @coroutine
    def get(self):
Ejemplo n.º 5
0
# coding: utf-8
from __future__ import print_function

# greenify
import greenify
greenify.greenify()

# python patch
import gevent
import gevent.monkey
gevent.monkey.patch_all()

import sys
import time
import mod_http_head
# Patch the compiled HTTP-HEAD extension so its blocking I/O can yield.
assert greenify.patch_lib(mod_http_head.__file__)
import fake_slow_http_server

# Event markers recorded by the check functions below.
stack = []


def c_http_head_check(addr):
    """HEAD-check `addr` through the C extension, tracing begin/end.

    Pushes ('begin'/'end', addr, 'c') markers onto the module-level
    `stack`, logs timing to stderr, and asserts the C call returned 1.
    """
    begin_marker = ('begin', addr, 'c')
    end_marker = ('end', addr, 'c')
    stack.append(begin_marker)
    print('%.5f head %s begin' % (time.time(), addr), file=sys.stderr)
    result = mod_http_head.http_head(*addr)
    print('%.5f head %s end' % (time.time(), addr), file=sys.stderr)
    stack.append(end_marker)
    assert result == 1


def python_http_head_check(addr):
Ejemplo n.º 6
0
import gevent
import time
import gevent.monkey
import slow_memcached_server
# Patch only time.sleep; libmc's C-level I/O is made cooperative below.
gevent.monkey.patch_time()

import greenify
greenify.greenify()
import libmc
# Patch every shared library libmc loads, not just the extension itself.
for so_path in libmc.DYNAMIC_LIBRARIES:
    assert greenify.patch_lib(so_path)
mc = libmc.Client(["127.0.0.1:%d" % slow_memcached_server.PORT])
# Poll timeout is in milliseconds; allow 2x the server's artificial delay.
mc.config(libmc._client.MC_POLL_TIMEOUT,
          slow_memcached_server.BLOCKING_SECONDS * 1000 * 2)  # ms

# Event markers recorded by the greenlet functions below.
stack = []


def mc_sleep():
    """Block on a memcached `set` against the deliberately slow server.

    Pushes begin/end markers onto the module-level `stack` so the caller
    can check how greenlets interleaved.
    """
    # Parenthesized single-argument print is behavior-identical in
    # Python 2 and also valid in Python 3 (the bare statement is not).
    print('begin mc sleep')
    stack.append('mc_sleep_begin')
    # Requires the slow memcached server to be running.
    assert mc.set('foo', 'bar')
    stack.append('mc_sleep_end')
    print('end mc sleep')


def singer():
    """Print the jingle-bells refrain six times, numbered [1] through [6]."""
    # range(1, 7) replaces the original dead `i = 0` plus in-loop
    # increment; print() calls are valid in Python 2 and 3 alike.
    for i in range(1, 7):
        print('[%d] Oh, jingle bells, jingle bells, Jingle all the way.' % i)
Ejemplo n.º 7
0
# -*- coding:utf-8 -*-
from tornado.ioloop import IOLoop
from tornado.web import RequestHandler, Application
from tornado.gen import coroutine
from gtornado import green

import MySQLdb.constants
import MySQLdb.converters
import MySQLdb.cursors

import greenify
greenify.greenify()  # enable greenify before patching the client library

# green.enable_debug()
# Make libmysqlclient's blocking calls cooperative; assert patching worked.
assert greenify.patch_lib("/usr/lib/x86_64-linux-gnu/libmysqlclient.so")
# NOTE(review): hard-coded DB credentials; the user value looks scrubbed
# to "******" by the code-sharing site. Load from config/env in real use.
conn_params = {
    "host": "10.86.11.116",
    "port": 3306,
    "user": "******",
    "passwd": "123456",
    "db": "mywork",
    "charset": "utf8"
}


def test_select():
    db = None
    try:
        db = MySQLdb.connect(**conn_params)
        db.autocommit(True)
        cursor = db.cursor()
Ejemplo n.º 8
0
# -*- coding:utf-8 -*-
from tornado.ioloop import IOLoop
from tornado.web import Application
from gtornado import Handler
import MySQLdb.constants
import MySQLdb.converters
import MySQLdb.cursors
from storm.locals import *  # noqa: star import kept as in the original

import greenify

greenify.greenify()

# green.enable_debug()
# Patch libmysqlclient so Storm/MySQLdb calls yield to greenlets.
assert greenify.patch_lib("/usr/lib/x86_64-linux-gnu/libmysqlclient.so")

# NOTE(review): hard-coded DB credentials; the user value looks scrubbed
# to "******" by the code-sharing site.
conn_params = {
    "host": "10.86.11.116",
    "port": 3306,
    "user": "******",
    "passwd": "123456",
    "db": "mywork",
    "charset": "utf8",
}


# Column names — presumably of the address_book table used below; confirm.
columns = ("id", "phone", "home", "office")


class AddressBook(object):
    __storm_table__ = "address_book"
Ejemplo n.º 9
0
from __future__ import absolute_import
import sys
import time
import greenify

greenify.greenify()  # enable greenify before patching any library
import pylibmc
from tornado.ioloop import IOLoop
from tornado.gen import coroutine
from tornado.web import RequestHandler, Application
from gtornado import green

# Make libmemcached's blocking socket calls cooperative.
# NOTE(review): the return value is not asserted here, so a failed
# patch would go unnoticed.
greenify.patch_lib("/usr/lib/x86_64-linux-gnu/libmemcached.so")


class MCPool(green.Pool):
    """Pool of raw pylibmc client connections (size fixed at construction)."""

    def create_raw_conn(self):
        # Each pool slot gets its own standalone client to localhost.
        servers = ["localhost"]
        return pylibmc.Client(servers)


mcpool = MCPool(200)


def call_mc(i):
    """Borrow a pooled memcached client, store a timestamp, return stats.

    `i` is an unused task index, kept so callers mapping over a range
    keep working unchanged.
    """
    conn = None
    try:
        conn = mcpool.get_conn()
        conn.set("timestamp", str(time.time()))
        return conn.get_stats()
    finally:
        # The original released `mc` unconditionally; if get_conn() raised,
        # the name was unbound and the finally block itself raised
        # NameError, masking the real error. Release only what we got.
        if conn is not None:
            mcpool.release(conn)
Ejemplo n.º 10
0
# coding: utf-8
import os
import greenify
greenify.greenify()  # enable greenify before patching the libraries below

# Colon-separated list of shared objects to greenify — per the inline
# comment, typically the path of libmysqlclient.so.
ungreen_lib_path = os.environ[
    'UNGREEN_LIB_PATH']  # path of 'libmysqlclient.so'
for path in ungreen_lib_path.split(':'):
    assert greenify.patch_lib(path)

# The app is imported only after the libraries have been patched.
from smsserver import app  # noqa
Ejemplo n.º 11
0
import gevent
import time
import gevent.monkey
import slow_memcached_server
# Patch only time.sleep; libmc's C-level I/O is made cooperative below.
gevent.monkey.patch_time()

import greenify
greenify.greenify()  # enable greenify before patching any library
import libmc
# Patch libmc's compiled extension so its blocking calls yield to gevent.
assert greenify.patch_lib(libmc._client.__file__)
mc = libmc.Client(["127.0.0.1:%d" % slow_memcached_server.PORT])
# Poll timeout is in milliseconds; allow 2x the server's artificial delay.
mc.config(libmc._client.MC_POLL_TIMEOUT,
          slow_memcached_server.BLOCKING_SECONDS * 1000 * 2)  # ms

# Event markers recorded by the greenlet functions below.
stack = []


def mc_sleep():
    """Block this greenlet on a memcached `set` against the slow server.

    Pushes begin/end markers onto the module-level `stack` so the caller
    can verify how greenlets interleaved.
    """
    # Parenthesized single-argument print is behavior-identical in
    # Python 2 and also valid in Python 3 (the bare statement is not).
    print('begin mc sleep')
    stack.append('mc_sleep_begin')
    assert mc.set('foo', 'bar'), "Run `python slow_memcached_server.py` first"
    stack.append('mc_sleep_end')
    print('end mc sleep')


def singer():
    """Print the refrain six times, appending 'sing' to `stack` each time."""
    # range(1, 7) replaces the original dead `i = 0` plus in-loop
    # increment; print() calls are valid in Python 2 and 3 alike.
    for i in range(1, 7):
        print('[%d] Oh, jingle bells, jingle bells, Jingle all the way.' % i)
        stack.append('sing')
Ejemplo n.º 12
0
gevent 目前只能针对纯 python 打补丁,遇到 C 语言写的库(如 MySQLdb 等等)无能为力。
而现在 greenify 库针对这方面作了改善,允许 C 语言库也打上补丁:

https://github.com/douban/greenify


用法如:

import greenify
greenify.greenify()

assert greenify.patch_lib('/usr/lib/libmemcached.so') # 必须这样给 C 语言库打上补丁,后面才可以用


from gevent import monkey
monkey.patch_all() # greenify 打上补丁之后, gevent 就可以正常使用

# 下面跟 gevent 一样用法
...
Ejemplo n.º 13
0
# coding: utf-8

# greenify
import greenify
greenify.greenify()

# python patch
import gevent
import gevent.monkey
gevent.monkey.patch_all()

import sys
import time
import mod_http_head
# Patch the compiled HTTP-HEAD extension so its blocking I/O can yield.
assert greenify.patch_lib(mod_http_head.__file__)
import fake_slow_http_server

# Event markers recorded by the check functions below.
stack = []


def c_http_head_check(addr):
    """HEAD-check `addr` through the C extension, tracing begin/end.

    Pushes ('begin'/'end', addr, 'c') markers onto the module-level
    `stack`, logs timing to stderr, and asserts the C call returned 1.
    """
    stack.append(('begin', addr, 'c'))
    # sys.stderr.write is identical under Python 2 and 3, unlike the
    # original `print >> sys.stderr` statement (Python-2-only syntax).
    sys.stderr.write('%.5f head %s begin\n' % (time.time(), addr))
    ret = mod_http_head.http_head(*addr)
    sys.stderr.write('%.5f head %s end\n' % (time.time(), addr))
    stack.append(('end', addr, 'c'))
    assert ret == 1


def python_http_head_check(addr):
    import httplib
Ejemplo n.º 14
0
# coding: utf-8
import os
import greenify
greenify.greenify()  # enable greenify before patching the libraries below


# Colon-separated list of shared objects to greenify — per the inline
# comment, typically the path of libmysqlclient.so.
ungreen_lib_path = os.environ['UNGREEN_LIB_PATH']  # path of 'libmysqlclient.so'
for path in ungreen_lib_path.split(':'):
    assert greenify.patch_lib(path)


# The app is imported only after the libraries have been patched.
from smsserver import app       # noqa
Ejemplo n.º 15
0
import gevent
import time
import gevent.monkey
import slow_memcached_server
# Patch only time.sleep; libmc's C-level I/O is made cooperative below.
gevent.monkey.patch_time()

import greenify
greenify.greenify()  # enable greenify before patching any library
import libmc
# Patch every shared library libmc loads, not just the extension itself.
for so_path in libmc.DYNAMIC_LIBRARIES:
    assert greenify.patch_lib(so_path)
mc = libmc.Client(["127.0.0.1:%d" % slow_memcached_server.PORT])
# Poll timeout is in milliseconds; allow 2x the server's artificial delay.
mc.config(libmc._client.MC_POLL_TIMEOUT,
          slow_memcached_server.BLOCKING_SECONDS * 1000 * 2)  # ms


# Event markers recorded by the greenlet functions below.
stack = []


def mc_sleep():
    """Block on a memcached `set` against the deliberately slow server.

    Pushes begin/end markers onto the module-level `stack` so the caller
    can check how greenlets interleaved.
    """
    # Parenthesized single-argument print is behavior-identical in
    # Python 2 and also valid in Python 3 (the bare statement is not).
    print('begin mc sleep')
    stack.append('mc_sleep_begin')
    # Requires the slow memcached server to be running.
    assert mc.set('foo', 'bar')
    stack.append('mc_sleep_end')
    print('end mc sleep')


def singer():
    i = 0
    for i in range(6):
        i += 1