def test_find_caller(self, monkeypatch):
    """A LoggerFactory logger attributes records to the calling test, not to structlog internals."""
    logger = LoggerFactory()()
    recorded = call_recorder(lambda record: None)
    monkeypatch.setattr(logger, 'handle', recorded)

    logger.error('Test')

    record = recorded.calls[0].args[0]
    assert 'test_find_caller' == record.funcName
    assert __name__ == record.name
    assert os.path.basename(__file__) == record.filename
from evergreen.patch import Patch from evergreen.performance_results import PerformanceData from evergreen.project import Project from evergreen.stats import TaskStats, TestStats from evergreen.task import Task from evergreen.task_reliability import TaskReliability from evergreen.tst import Tst from evergreen.util import evergreen_input_to_output, iterate_by_time_window from evergreen.version import Requester, Version try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse # type: ignore structlog.configure(logger_factory=LoggerFactory()) LOGGER = structlog.getLogger(__name__) CACHE_SIZE = 5000 DEFAULT_LIMIT = 100 MAX_RETRIES = 3 START_WAIT_TIME_SEC = 2 MAX_WAIT_TIME_SEC = 5 class EvergreenApi(object): """Base methods for building API objects.""" def __init__( self, api_server: str = DEFAULT_API_SERVER, auth: Optional[EvgAuth] = None,
from celery import Celery
from flask import Flask
# NOTE(review): the flask.ext.* namespace was removed in Flask 1.0; these
# presumably need to become `from flask_sqlalchemy import SQLAlchemy` and
# `from flask_admin import Admin` on any modern Flask — confirm pinned version.
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.admin import Admin
from flask_marshmallow import Marshmallow
import structlog
from structlog.stdlib import LoggerFactory
from structlog.threadlocal import wrap_dict

# Route structlog output through the stdlib logging machinery, keeping bound
# context in a thread-local dict.
structlog.configure(
    context_class=wrap_dict(dict),
    logger_factory=LoggerFactory(),
)

# Flask application and its extensions; config is read from the `settings` module.
app = Flask(__name__)
app.config.from_object('settings')
admin = Admin(app, name='DynoUp', template_mode='bootstrap3')
db = SQLAlchemy(app)
ma = Marshmallow(app)


def make_celery(app):
    # Build a Celery app named after the Flask app, using its configured broker,
    # and mirror the rest of the Flask config into Celery.
    celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True
    # NOTE(review): the function ends here without installing ContextTask or
    # returning `celery` — the usual ContextTask.__call__ (app-context wrapper)
    # and `return celery` appear truncated in this view. Confirm against the
    # full file before relying on this function.
def test_sets_correct_logger(self):
    """Instantiating LoggerFactory installs the find-caller-fixed logger class globally."""
    before = logging.getLoggerClass()
    assert before is logging.Logger

    LoggerFactory()

    after = logging.getLoggerClass()
    assert after is _FixedFindCallerLogger
import logging
import os
import re
from urllib.parse import urljoin, urlparse

import attr
import grequests
import requests as r
from cached_property import cached_property
from defusedxml import ElementTree
from structlog import configure, get_logger
from structlog.stdlib import LoggerFactory
from tqdm import tqdm

# Send structlog output through stdlib logging with default basicConfig handlers.
logging.basicConfig()
configure(logger_factory=LoggerFactory())
logger = get_logger(__name__)


@attr.s
class Funk:
    # Crawler/checker configured from a sitemap URL; only attribute
    # declarations are visible in this view — methods, if any, follow
    # elsewhere in the file.
    sitemap_url = attr.ib()                     # required: URL of the sitemap to process
    _concurrent = attr.ib(default=None)         # max concurrent requests; None = library default
    _verify_ssl = attr.ib(default=True)         # pass-through TLS verification flag
    _verify_response = attr.ib(default=False)   # whether to validate response contents
    _force_https = attr.ib(default=None)        # rewrite http:// URLs to https:// when set
    _replace = attr.ib(default=None)            # URL substitution rule(s), shape not visible here
    # NOTE(review): attrs expects `repr` to be a bool (or callable); `repr=None`
    # happens to behave like repr=False but `repr=False` was presumably
    # intended — confirm and normalize.
    _session = attr.ib(init=False, repr=None)
    _results = attr.ib(default=attr.Factory(list), repr=None)
def execute(args: Namespace) -> None:
    """Run the service according to the args."""
    # Deferred imports: project modules are only needed when actually running.
    from hathorlib.client import HathorClient
    from txstratum.api import App
    from txstratum.manager import TxMiningManager
    from txstratum.utils import start_logging

    # Configure log.
    start_logging()
    if os.path.exists(args.log_config):
        # A log-config file exists: load it and switch structlog onto stdlib logging.
        logging.config.fileConfig(args.log_config)
        from structlog.stdlib import LoggerFactory
        structlog.configure(logger_factory=LoggerFactory())
        logger.info('tx-mining-service', backend=args.backend)
        logger.info('Configuring log...', log_config=args.log_config)
    else:
        logger.info('tx-mining-service', backend=args.backend)
        logger.info('Log config file not found; using default configuration.',
                    log_config=args.log_config)

    # Set up all parts.
    loop = asyncio.get_event_loop()
    backend = HathorClient(args.backend)
    manager = TxMiningManager(
        backend=backend,
        address=args.address,
    )
    loop.run_until_complete(backend.start())
    loop.run_until_complete(manager.start())
    # Stratum server: `manager` acts as the protocol factory for miner connections.
    server = loop.run_until_complete(
        loop.create_server(manager, '0.0.0.0', args.stratum_port))

    if args.prometheus:
        # Optional metrics exporter, only imported when enabled.
        from txstratum.prometheus import PrometheusExporter
        metrics = PrometheusExporter(manager, args.prometheus)
        metrics.start()

    # HTTP API served via aiohttp on a separate port.
    api_app = App(manager, max_tx_weight=args.max_tx_weight,
                  max_timestamp_delta=args.max_timestamp_delta,
                  tx_timeout=args.tx_timeout,
                  fix_invalid_timestamp=args.fix_invalid_timestamp)
    logger.info('API Configuration',
                max_tx_weight=api_app.max_tx_weight,
                tx_timeout=api_app.tx_timeout,
                max_timestamp_delta=api_app.max_timestamp_delta,
                fix_invalid_timestamp=api_app.fix_invalid_timestamp)
    web_runner = web.AppRunner(api_app.app)
    loop.run_until_complete(web_runner.setup())
    site = web.TCPSite(web_runner, '0.0.0.0', args.api_port)
    loop.run_until_complete(site.start())

    try:
        logger.info('Stratum Server running at 0.0.0.0:{}...'.format(
            args.stratum_port))
        logger.info('TxMining API running at 0.0.0.0:{}...'.format(
            args.api_port))
        if args.testnet:
            logger.info('Running with testnet config file')
        # Block here until interrupted.
        loop.run_forever()
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl-C: stop accepting miners, stop the backend,
        # then close the loop.
        # NOTE(review): web_runner/site are not cleaned up here
        # (web_runner.cleanup()) — presumably acceptable at process exit, but
        # confirm against the full file.
        logger.info('Stopping...')
        server.close()
        loop.run_until_complete(server.wait_closed())
        loop.run_until_complete(backend.stop())
        loop.close()