def test_with_custom_handler(self):
    """A handler attached to the named logger before get_logger() is kept."""
    named = logging.getLogger('bar')
    null_handler = logging.NullHandler()
    named.addHandler(null_handler)
    wrapped = get_logger('bar')
    assert wrapped.handlers[0] is null_handler
def test_with_custom_handler(self):
    """get_logger must preserve a pre-existing handler on the named logger."""
    existing = logging.getLogger('bar')
    handler = logging.NullHandler()
    existing.addHandler(handler)
    result = get_logger('bar')
    self.assertIs(result.handlers[0], handler)
def setup_logging(loglevel=logging.DEBUG, loggers=None):
    """Setup logging to stdout.

    :param loglevel: level applied to every configured logger
        (default: ``logging.DEBUG``).
    :param loggers: iterable of logger names to configure; when ``None``
        the kombu connection and channel loggers are configured.
    """
    # A mutable (list) default argument is shared across calls; use the
    # None-sentinel idiom instead.
    if loggers is None:
        loggers = ['kombu.connection', 'kombu.channel']
    for logger_name in loggers:
        logger = get_logger(logger_name)
        logger.addHandler(logging.StreamHandler())
        logger.setLevel(loglevel)
from threading import Condition, Event, Lock, Thread from time import time, sleep, mktime from datetime import datetime, timedelta from kombu.log import get_logger VERSION = (1, 0, 0) __version__ = ".".join(map(str, VERSION)) __author__ = "Ask Solem" __contact__ = "*****@*****.**" __homepage__ = "http://github.com/ask/timer2/" __docformat__ = "restructuredtext" DEFAULT_MAX_INTERVAL = 2 logger = get_logger("timer2") class Entry(object): cancelled = False def __init__(self, fun, args=None, kwargs=None): self.fun = fun self.args = args or [] self.kwargs = kwargs or {} self.tref = self def __call__(self): return self.fun(*self.args, **self.kwargs) def cancel(self):
from celery.five import THREAD_TIMEOUT_MAX, map from celery.utils.timeutils import timedelta_seconds, timezone from kombu.log import get_logger VERSION = (1, 0, 0) __version__ = '.'.join(map(str, VERSION)) __author__ = 'Ask Solem' __contact__ = '*****@*****.**' __homepage__ = 'http://github.com/ask/timer2/' __docformat__ = 'restructuredtext' DEFAULT_MAX_INTERVAL = 2 TIMER_DEBUG = os.environ.get('TIMER_DEBUG') EPOCH = datetime.utcfromtimestamp(0).replace(tzinfo=timezone.utc) logger = get_logger('timer2') class Entry(object): cancelled = False def __init__(self, fun, args=None, kwargs=None): self.fun = fun self.args = args or [] self.kwargs = kwargs or {} self.tref = self def __call__(self): return self.fun(*self.args, **self.kwargs) def cancel(self):
import random import string import time from kombu.transport import base, TRANSPORT_ALIASES from kombu.transport.pyamqp import Transport as AMQPTransport, \ Connection as AMQPConnection, \ Channel as AMQPChannel from kombu.log import get_logger logger = get_logger("kombu.transport.multiamqp") TRANSPORT_ALIASES[ "multiamqp"] = "kombu_multibroker.transport.multiamqp.Transport" class NoHostsError(Exception): pass class Channel(AMQPChannel): handlers = {"basic_ack": []} def basic_publish(self, *args, **kwargs): super(Channel, self).basic_publish(*args, **kwargs) return self.wait(allowed_methods=[ (60, 80), # Channel.basic_ack ]) class Connection(AMQPConnection):
def setup_logging(loglevel=logging.DEBUG, loggers=None):
    """Attach a StreamHandler and set *loglevel* on each named logger.

    :param loglevel: level applied to every configured logger
        (default: ``logging.DEBUG``).
    :param loggers: iterable of logger names to configure; when ``None``
        the kombu connection and channel loggers are configured.
    """
    # Fixes two issues: the mutable (list) default argument shared across
    # calls, and the loop variable named ``logger`` that actually held a
    # string name while the real logger was bound to ``l``.
    if loggers is None:
        loggers = ['kombu.connection', 'kombu.channel']
    for name in loggers:
        logger = get_logger(name)
        logger.addHandler(logging.StreamHandler())
        logger.setLevel(loglevel)
import random
import string
import time

from kombu.log import get_logger
from kombu.transport import base, TRANSPORT_ALIASES
from kombu.transport.pyamqp import (
    Channel as AMQPChannel,
    Connection as AMQPConnection,
    Transport as AMQPTransport,
)

logger = get_logger("kombu.transport.multiamqp")

# Register this transport under the "multiamqp" alias.
TRANSPORT_ALIASES["multiamqp"] = (
    "kombu_multibroker.transport.multiamqp.Transport")


class NoHostsError(Exception):
    """Raised when no broker hosts are available."""


class Channel(AMQPChannel):
    """AMQP channel that blocks on publish until the broker acks."""

    handlers = {"basic_ack": []}

    def basic_publish(self, *args, **kwargs):
        super(Channel, self).basic_publish(*args, **kwargs)
        # Wait for the broker to confirm delivery with Channel.basic_ack.
        return self.wait(allowed_methods=[
            (60, 80),  # Channel.basic_ack
        ])


class Connection(AMQPConnection):
    Channel = Channel


class Transport(AMQPTransport):
    Connection = Connection
from billiard.util import register_after_fork except ImportError: # pragma: no cover try: from multiprocessing.util import register_after_fork # noqa except ImportError: def register_after_fork(*args, **kwargs): # noqa pass try: import redis except ImportError: # pragma: no cover redis = None # noqa from . import virtual logger = get_logger('kombu.transport.redis') crit, warn = logger.critical, logger.warn DEFAULT_PORT = 6379 DEFAULT_DB = 0 PRIORITY_STEPS = [0, 3, 6, 9] error_classes_t = namedtuple('error_classes_t', ( 'connection_errors', 'channel_errors', )) # This implementation may seem overly complex, but I assure you there is # a good reason for doing it this way. # # Consuming from several connections enables us to emulate channels,
def test_when_logger(self):
    """Passing a Logger instance through get_logger installs a NullHandler."""
    result = get_logger(logging.getLogger('foo'))
    first_handler = result.handlers[0]
    assert isinstance(first_handler, logging.NullHandler)
__author__ = 'marc' from kombu.mixins import ConsumerMixin from kombu.log import get_logger from kombu.utils import kwdict, reprcall import os, sys sys.path.insert(0, os.path.join('..')) sys.path.insert(0, os.path.join('..', '..')) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "renesola.settings") from django.conf import settings from apps.pricing.crm.queues import task_queues logger = get_logger(__name__) class Worker(ConsumerMixin): def __init__(self, connection): self.connection = connection def get_consumers(self, Consumer, channel): return [ Consumer(queues=task_queues, accept=['pickle', 'json'], callbacks=[self.process_task]) ] def process_task(self, body, message): fun = body['fun'] args = body['args'] kwargs = body['kwargs']
from billiard.util import register_after_fork except ImportError: # pragma: no cover try: from multiprocessing.util import register_after_fork # noqa except ImportError: def register_after_fork(*args, **kwargs): # noqa pass try: import redis except ImportError: # pragma: no cover redis = None # noqa logger = get_logger('kombu.transport.redis') crit, warn = logger.critical, logger.warn DEFAULT_PORT = 6379 DEFAULT_DB = 0 PRIORITY_STEPS = [0, 3, 6, 9] error_classes_t = namedtuple('error_classes_t', ( 'connection_errors', 'channel_errors', )) NO_ROUTE_ERROR = """ Cannot route message for exchange {0!r}: Table empty or key no longer exists. Probably the key ({1!r}) has been removed from the Redis database.
def __init__(self, instance, logger=None, ident=None):
    """Wrap *instance*, resolving *logger* through get_logger."""
    self.ident = ident
    self.instance = instance
    # get_logger accepts a name, an existing Logger, or None.
    self.logger = get_logger(logger)
from anyjson import deserialize from celery import current_app as celery from celery import platforms from celery.bin.celeryd_multi import MultiTool from celery.utils.encoding import safe_str from eventlet import Timeout from kombu.log import get_logger from kombu.pools import connections, producers from django.db import models from django.utils.translation import ugettext_lazy as _ from . import managers from cyme.utils import cached_property, find_symbol logger = get_logger("Instance") def shsplit(s): if s: return shlex.split(safe_str(s)) return [] class Broker(models.Model): """Broker connection arguments.""" # XXX I think this model can be removed now that it only contains the URL. objects = managers.BrokerManager() url = models.CharField(_(u"URL"), max_length=200, unique=True)
def test_when_logger(self):
    """get_logger on an existing Logger object gains a NullHandler."""
    result = get_logger(logging.getLogger('foo'))
    self.assertIsInstance(result.handlers[0], logging.NullHandler)
def test_when_string(self):
    """A string name resolves to the stdlib logger of that name, with a NullHandler."""
    result = get_logger('foo')
    assert result is logging.getLogger('foo')
    assert isinstance(result.handlers[0], logging.NullHandler)
import os import socket from cPickle import loads, dumps from Queue import Empty import zmq from kombu.exceptions import StdConnectionError, StdChannelError from kombu.log import get_logger from kombu.utils import cached_property from kombu.utils.eventio import poll, READ from . import virtual logger = get_logger('kombu.transport.zmq') DEFAULT_PORT = 5555 DEFAULT_HWM = 128 DEFAULT_INCR = 1 class MultiChannelPoller(object): eventflags = READ def __init__(self): # active channels self._channels = set() # file descriptor -> channel map self._fd_to_chan = {} # poll implementation (epoll/kqueue/select)
def test_when_string(self):
    """log.get_logger('foo') returns the stdlib 'foo' logger with a NullHandler attached."""
    result = log.get_logger("foo")
    self.assertIs(result, logging.getLogger("foo"))
    self.assertIsInstance(result.handlers[0], log.NullHandler)
from boto import sdb as _sdb
from boto import sqs as _sqs
from boto.sdb.connection import SDBConnection
from boto.sdb.domain import Domain
from boto.sqs.connection import SQSConnection
from boto.sqs.message import Message

from kombu.five import Empty, range, text_t
from kombu.log import get_logger
from kombu.transport.virtual import scheduling
from kombu.utils import cached_property, uuid
from kombu.utils.encoding import bytes_to_str, safe_str

from . import virtual

logger = get_logger(__name__)

# Dots are replaced by dash; all other punctuation replaced by underscore.
CHARS_REPLACE_TABLE = {ord(c): 0x5f
                       for c in string.punctuation if c not in '-_.'}
CHARS_REPLACE_TABLE[0x2e] = 0x2d  # '.' -> '-'


def maybe_int(x):
    """Return *x* converted to int when possible, else *x* unchanged."""
    try:
        return int(x)
    except ValueError:
        return x


BOTO_VERSION = tuple(maybe_int(part) for part in boto.__version__.split('.'))
# Long polling support was added to boto in 2.8.
W_LONG_POLLING = BOTO_VERSION >= (2, 8)
from contextlib import contextmanager from kombu.exceptions import ChannelError from kombu.five import Empty from kombu.log import get_logger from kombu.utils.json import loads, dumps from kombu.utils.objects import cached_property from . import virtual try: import etcd except ImportError: etcd = None logger = get_logger('kombu.transport.etcd') DEFAULT_PORT = 2379 DEFAULT_HOST = 'localhost' class Channel(virtual.Channel): """Etcd Channel class which talks to the Etcd.""" prefix = 'kombu' index = None timeout = 10 session_ttl = 30 lock_ttl = 10 def __init__(self, *args, **kwargs):
from anyjson import loads, dumps from kombu.exceptions import ( InconsistencyError, StdChannelError, VersionMismatch, ) from kombu.log import get_logger from kombu.utils import cached_property, uuid from kombu.utils.encoding import str_t from kombu.utils.eventio import poll, READ, ERR from . import virtual logger = get_logger("kombu.transport.redis") DEFAULT_PORT = 6379 DEFAULT_DB = 0 PRIORITY_STEPS = [0, 3, 6, 9] # This implementation may seem overly complex, but I assure you there is # a good reason for doing it this way. # # Consuming from several connections enables us to emulate channels, # which means we can have different service guarantees for individual # channels. # # So we need to consume messages from multiple connections simultaneously, # and using epoll means we don't have to do so using multiple threads.
from contextlib import contextmanager from kombu.exceptions import ChannelError from kombu.five import Empty, monotonic from kombu.log import get_logger from kombu.utils.json import loads, dumps from kombu.utils.objects import cached_property from . import virtual try: import consul except ImportError: consul = None logger = get_logger('kombu.transport.consul') DEFAULT_PORT = 8500 DEFAULT_HOST = 'localhost' class LockError(Exception): """An error occurred while trying to acquire the lock.""" class Channel(virtual.Channel): """Consul Channel class which talks to the Consul Key/Value store.""" prefix = 'kombu' index = None timeout = '10s'
from queue import Empty from time import monotonic from kombu.exceptions import ChannelError from kombu.log import get_logger from kombu.utils.json import dumps, loads from kombu.utils.objects import cached_property from . import virtual try: import consul except ImportError: consul = None logger = get_logger('kombu.transport.consul') DEFAULT_PORT = 8500 DEFAULT_HOST = 'localhost' class LockError(Exception): """An error occurred while trying to acquire the lock.""" class Channel(virtual.Channel): """Consul Channel class which talks to the Consul Key/Value store.""" prefix = 'kombu' index = None timeout = '10s'