Example #1
import logging
import time

from tornado.ioloop import IOLoop

# Logger lives in cocaine.logger; CocaineHandler is assumed to be importable
# from the same module, though its location may differ between framework versions.
from cocaine.logger import Logger, CocaineHandler


def test_logger():
    ioloop = IOLoop.current()
    logger = Logger()
    assert logger is Logger()

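    # main() exercises every log level, several "extra" payloads (including
    # malformed ones) and printf-style message formatting.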
    def main():
        logger.debug("debug_msg", extra={"A": 1, "B": 2.0})
        logger.info("info_msg", extra={"A": 1, "B": 2})
        logger.warning("warning_msg", extra={"A": 1, "B": 2.4})
        logger.warn("warn_msg", extra={"A": 1, "B": 2.4})
        logger.error("error_msg", extra={"A": 1, "BoolFlag": False})
        logger.debug("debug_mesg")

        logger.info("message without attributes")
        logger.error("message with converted attributes",
                     extra={1: "BAD_ATTR"})
        logger.error("message with bad extra", extra=("A", "B"))
        logger.error("message with bad extra", extra={"ATTR": [1, 2, 3]})
        logger.error("format %s %d", "str", 100, extra={"level": "error"})
        logger.info("badformat %s %d", "str", extra={"level": "error"})

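        # Route a standard-library logger through the cocaine logging service
        # by attaching a CocaineHandler to it.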
        try:
            l = logging.getLogger("cocaine.testlogger")
            lh = CocaineHandler()
            l.setLevel(logging.DEBUG)
            l.addHandler(lh)
            l.info("logged via logging %s", "handler")
        finally:
            l.removeHandler(lh)

    ioloop.add_timeout(time.time() + 3, ioloop.stop)
    ioloop.add_callback(main)
    ioloop.start()
Example #2
import logging
import logging.handlers

# CocaineHandler's module may differ between cocaine-framework-python releases.
from cocaine.logger import Logger, CocaineHandler


def enable_logging(options):
    if options.logging is None or options.logging.lower() == "none":
        return

    general_logger = logging.getLogger("cocaine.proxy.general")
    general_logger.setLevel(getattr(logging, options.logging.upper()))
    general_formatter = logging.Formatter(options.generallogfmt,
                                          datefmt=options.datefmt)

    access_logger = logging.getLogger("cocaine.proxy.access")
    access_logger.setLevel(getattr(logging, options.logging.upper()))
    access_formatter = logging.Formatter(options.accesslogfmt,
                                         datefmt=options.datefmt)

    cocainelogger = None
    if options.logframework:
        cocainelogger = logging.getLogger("cocaine.baseservice")
        cocainelogger.setLevel(getattr(logging, options.logging.upper()))

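    # Optionally forward all configured loggers to the cocaine logging service.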
    if options.log_to_cocaine:
        Logger().target = "tornado-proxy"
        handler = CocaineHandler()
        general_logger.addHandler(handler)
        if cocainelogger:
            cocainelogger.addHandler(handler)

        access_logger.addHandler(handler)

    if options.log_file_prefix:
        handler = logging.handlers.WatchedFileHandler(
            filename=options.log_file_prefix, )
        handler.setFormatter(general_formatter)
        general_logger.addHandler(handler)

        handler = logging.handlers.WatchedFileHandler(
            filename=options.log_file_prefix, )
        handler.setFormatter(access_formatter)
        access_logger.addHandler(handler)

        if cocainelogger:
            cocainehandler = logging.handlers.WatchedFileHandler(
                filename=options.log_file_prefix + "framework.log")
            cocainehandler.setFormatter(general_formatter)
            cocainelogger.addHandler(cocainehandler)

    if options.log_to_stderr or (options.log_to_stderr is None
                                 and not general_logger.handlers):
        stderr_handler = logging.StreamHandler()
        stderr_handler.setFormatter(general_formatter)

        general_logger.addHandler(stderr_handler)
        if cocainelogger:
            cocainelogger.addHandler(stderr_handler)

        stderr_handler = logging.StreamHandler()
        stderr_handler.setFormatter(access_formatter)
        access_logger.addHandler(stderr_handler)
Example #3
def setUp(self):
    self.logger = Logger()
Example #4
def main(uuid_prefix, apps_poll_interval, port, uniresis_stub_uuid,
         dup_to_console, console_log_level):

    shared_status = SharedStatus(name=MODULE_NAME)

    config = Config(shared_status)
    config.update()

    committed_state = CommittedState()
    committed_state.control_filter = config.control_filter

    if console_log_level is not None:
        config.console_log_level = console_log_level

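    # Queues connecting the pipeline stages: incoming state, control filter,
    # control dispatch, and the state/metrics dumpers.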
    input_queue = queues.Queue(config.input_queue_size)
    filter_queue = queues.Queue()
    control_queue = queues.Queue()

    state_dumper_queue = queues.Queue()
    metrics_dumper_queue = queues.Queue()

    logger = Logger(config.locator_endpoints)

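    # Unicorn client wrapped in the secure (authorization) adaptor built from
    # config.secure.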
    unicorn = SecureServiceFabric.make_secure_adaptor(
        Service(config.unicorn_name, config.locator_endpoints),
        *config.secure,
        endpoints=config.locator_endpoints)

    node = Service(config.node_name, config.locator_endpoints)

    uniresis = catchup_an_uniresis(uniresis_stub_uuid,
                                   config.locator_endpoints)

    sentry_wrapper = SentryClientWrapper(logger,
                                         dsn=config.sentry_dsn,
                                         revision=__version__)

    context = Context(LoggerSetup(logger, dup_to_console), config, __version__,
                      sentry_wrapper, shared_status)

    if not apps_poll_interval:
        apps_poll_interval = config.apps_poll_interval_sec

    sharding_setup = ShardingSetup(context, uniresis)

    control_filter = burlak.ControlFilterListener(context, unicorn,
                                                  filter_queue, input_queue)

    acquirer = burlak.StateAcquirer(context, sharding_setup, input_queue)
    workers_distribution = dict()
    state_processor = burlak.StateAggregator(
        context,
        node,
        committed_state,
        filter_queue,
        input_queue,
        control_queue,
        state_dumper_queue,
        apps_poll_interval,
        workers_distribution,
    )

    apps_elysium = burlak.AppsElysium(context, committed_state, node,
                                      control_queue, state_dumper_queue)

    if not uuid_prefix:
        uuid_prefix = config.uuid_path

    feedback_dumper = burlak.UnicornDumper(context, unicorn,
                                           sharding_setup.get_feedback_route,
                                           state_dumper_queue)
    metrics_dumper = burlak.UnicornDumper(context, unicorn,
                                          sharding_setup.get_metrics_route,
                                          metrics_dumper_queue)

    # run async poll tasks in data-flow reverse order, from sink to source
    io_loop = IOLoop.current()

    io_loop.spawn_callback(control_filter.subscribe_to_control_filter)
    io_loop.spawn_callback(apps_elysium.blessing_road)
    io_loop.spawn_callback(state_processor.process_loop)

    io_loop.spawn_callback(feedback_dumper.listen_for_events)
    io_loop.spawn_callback(metrics_dumper.listen_for_events)
    io_loop.spawn_callback(
        lambda: acquirer.subscribe_to_state_updates(unicorn))

    qs = dict(input=input_queue, control=control_queue)
    units = dict(state_acquisition=acquirer,
                 state_dispatch=state_processor,
                 elysium=apps_elysium)

    cfg_port, prefix = config.web_endpoint

    if not port:
        port = cfg_port

    metrics_gatherer = SysMetricsGatherer()
    io_loop.spawn_callback(metrics_gatherer.gather)

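    # Bring up the operational web API and the status endpoint, then block in
    # the IOLoop until the process is stopped.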
    try:
        uptime = Uptime()
        wopts = WebOptions(
            prefix,
            port,
            uptime,
            uniresis,
            committed_state,
            metrics_gatherer,
            qs,
            units,
            workers_distribution,
            __version__,
        )
        web_app = make_web_app_v1(wopts)  # noqa: F841
        status_app = make_status_web_handler(  # noqa: F841
            shared_status, config.status_web_path, config.status_port)

        click.secho('orca is starting...', fg='green')
        IOLoop.current().start()
    except Exception as e:
        click.secho('error while spawning service: {}'.format(e), fg='red')
Example #5
from functools import wraps

import urllib2

import msgpack

from cocaine.logger import Logger
from flask import Flask, request
from flask import abort, render_template

from flowmastermind.auth import auth_controller
from flowmastermind.config import config
from flowmastermind.error import ApiResponseError, AuthenticationError, AuthorizationError
from flowmastermind.jobs import job_types, job_types_groups, job_statuses
from flowmastermind.response import JsonResponse


logging = Logger()

app = Flask(__name__)


DEFAULT_DT_FORMAT = '%Y-%m-%d %H:%M:%S'
JOBS_FILTER_DT_FIELD_FORMAT = '%Y/%m/%d %H:%M'


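# Decorator that wraps a view's return value into a uniform JSON envelope with
# a 'status' field.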
def json_response(func):

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            res = {'status': 'success',
                   'response': func(*args, **kwargs)}
        except Exception as exc:
            # The original example is truncated at this point; a minimal error
            # branch is assumed so the decorator stays syntactically complete.
            logging.error('handler {} failed: {}'.format(func.__name__, exc))
            res = {'status': 'error',
                   'response': str(exc)}
        return res

    return wrapper
Example #6
#!/usr/bin/env python
from cocaine.worker import Worker
from cocaine.logger import Logger
from cocaine.decorators import http

import uuid

log = Logger(endpoints=(("2a02:6b8:0:1a16:556::200", 10053), ))


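# The @http decorator adapts a worker event handler to an HTTP-style
# request/response pair with write_head()/write().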
@http
def http_echo(request, response):

    req_id = uuid.uuid4().hex

    log.info("request %s: start" % req_id)

    req = yield request.read()

    log.info("request %s: got body of %s bytes" % (req_id, len(req.body)))

    response.write_head(200, {})

    log.info("request %s: responding with original body" % req_id)

    response.write(req.body)

    log.info("request %s: done" % req_id)


if __name__ == '__main__':
    # The original example is truncated here; a minimal entry point is assumed,
    # and the event name "http_echo" is a guess.
    W = Worker()
    W.run({"http_echo": http_echo})
Example #7
#!/usr/bin/env python

import msgpack

from cocaine.logger import Logger
from cocaine.worker import Worker
from cocaine.services import Service

from cocaine.decorators import http

log = Logger()


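# Each handler reads the whole request body and writes its reply back through
# the response stream before closing it.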
def echo(req, resp):
    log.error("on enter")
    msg = yield req.read()
    log.error("on answer")
    resp.write(str(msg))
    log.error("on leave")
    resp.close()


def inc(req, resp):
    log.error("on enter")
    msg = yield req.read()
    log.error("on answer")
    resp.write(str(int(msg) + 1))
    log.error("on leave")
    resp.close()