Example 1
    def test_eq(self):
        e1 = Exchange('foo', 'direct')
        e2 = Exchange('foo', 'direct')
        assert e1 == e2

        e3 = Exchange('foo', 'topic')
        assert e1 != e3

        assert e1.__eq__(True) == NotImplemented
Example 2
    def test_assert_is_bound(self):
        exchange = Exchange('foo', 'direct')
        with pytest.raises(NotBoundError):
            exchange.declare()
        conn = get_conn()

        chan = conn.channel()
        exchange.bind(chan).declare()
        assert 'exchange_declare' in chan
Example 3
    def test_assert_is_bound(self):
        exchange = Exchange('foo', 'direct')
        with self.assertRaises(NotBoundError):
            exchange.declare()
        conn = get_conn()

        chan = conn.channel()
        exchange.bind(chan).declare()
        self.assertIn('exchange_declare', chan)
Example 4
    def test_eq(self):
        e1 = Exchange('foo', 'direct')
        e2 = Exchange('foo', 'direct')
        self.assertEqual(e1, e2)

        e3 = Exchange('foo', 'topic')
        self.assertNotEqual(e1, e3)

        self.assertEqual(e1.__eq__(True), NotImplemented)
Example 5
    def test_bound(self):
        exchange = Exchange('foo', 'direct')
        assert not exchange.is_bound
        assert '<unbound' in repr(exchange)

        chan = get_conn().channel()
        bound = exchange.bind(chan)
        assert bound.is_bound
        assert bound.channel is chan
        assert 'bound to chan:%r' % (chan.channel_id,) in repr(bound)
Example 6
    def test_bound(self):
        exchange = Exchange('foo', 'direct')
        self.assertFalse(exchange.is_bound)
        self.assertIn('<unbound', repr(exchange))

        chan = get_conn().channel()
        bound = exchange.bind(chan)
        self.assertTrue(bound.is_bound)
        self.assertIs(bound.channel, chan)
        self.assertIn('bound to chan:%r' % (chan.channel_id, ),
                      repr(bound))
Example 7
def connect_to_amqp(sysconfig):
    """
    Connect to an AMQP Server, and return the connection and Exchange.
    :param sysconfig: The slickqaweb.model.systemConfiguration.amqpSystemConfiguration.AMQPSystemConfiguration instance
                      to use as the source of information of how to connect.
    :return: a (connection, exchange) tuple on success; raises AMQPConnectionError on failure
    """
    assert isinstance(sysconfig, AMQPSystemConfiguration)
    configuration = dict()
    configuration['AMQP'] = dict()
    if hasattr(sysconfig, 'hostname') and sysconfig.hostname is not None:
        configuration['AMQP']['hostname'] = sysconfig.hostname
    else:
        raise AMQPConnectionError(message="No hostname defined for AMQP connection.")
    if hasattr(sysconfig, 'port') and sysconfig.port is not None:
        configuration['AMQP']['port'] = sysconfig.port
    if hasattr(sysconfig, 'username') and sysconfig.username is not None:
        configuration['AMQP']['username'] = sysconfig.username
    if hasattr(sysconfig, 'password') and sysconfig.password is not None:
        configuration['AMQP']['password'] = sysconfig.password
    if hasattr(sysconfig, 'virtualHost') and sysconfig.virtualHost is not None:
        configuration['AMQP']['virtual host'] = sysconfig.virtualHost
    if hasattr(sysconfig, 'exchangeName') and sysconfig.exchangeName is not None:
        configuration['AMQP']['exchange'] = sysconfig.exchangeName
    else:
        raise AMQPConnectionError(message="No exchange defined for AMQP connection.")

    logger = logging.getLogger("slickqaweb.amqpcon.connect_to_amqp")

    url = str.format("amqp://{hostname}:{port}", **dict(list(configuration['AMQP'].items())))
    if 'virtual host' in configuration['AMQP'] and configuration['AMQP']['virtual host'] != '':
        url = str.format("{}/{}", url, configuration['AMQP']['virtual host'])
    logger.debug("AMQPConnection configured with url %s", url)
    exchange = Exchange(configuration['AMQP'].get('exchange', "amqp.topic"), type='topic', durable=True)
    logger.debug("AMQPConnection is using exchange %s", exchange)
    connection = None
    if 'username' in configuration['AMQP'] and 'password' in configuration['AMQP']:
        username = configuration['AMQP']['username']
        password = configuration['AMQP']['password']
        logger.debug("Using username %s and password %s to connect to AMQP Broker", username, password)
        connection = Connection(url, userid=username, password=password)
    else:
        connection = Connection(url)

    # connections typically connect on demand, but we want to surface errors before proceeding
    connection.connect()
    exchange = exchange(connection)
    exchange.declare()
    return (connection, exchange)
Example 8
 def __init__(self, url, exchange_name=None):
     self.url = url
     self.connect()
     self.exchange_name = exchange_name if exchange_name else celery_queue('socket_notification')
     self.channel = self.connection.channel()
     self.socket_exchange = Exchange(self.exchange_name, type='fanout', channel=self.channel)
     self.socket_exchange.declare()
Example 9
    def test_revive(self):
        exchange = Exchange('foo', 'direct')
        conn = get_conn()
        chan = conn.channel()

        # reviving unbound channel is a noop.
        exchange.revive(chan)
        self.assertFalse(exchange.is_bound)
        self.assertIsNone(exchange._channel)

        bound = exchange.bind(chan)
        self.assertTrue(bound.is_bound)
        self.assertIs(bound.channel, chan)

        chan2 = conn.channel()
        bound.revive(chan2)
        self.assertTrue(bound.is_bound)
        self.assertIs(bound._channel, chan2)
Example 10
    def test_revive(self):
        exchange = Exchange('foo', 'direct')
        conn = get_conn()
        chan = conn.channel()

        # reviving unbound channel is a noop.
        exchange.revive(chan)
        assert not exchange.is_bound
        assert exchange._channel is None

        bound = exchange.bind(chan)
        assert bound.is_bound
        assert bound.channel is chan

        chan2 = conn.channel()
        bound.revive(chan2)
        assert bound.is_bound
        assert bound._channel is chan2
Example 11
class SocketBrokerClient:
    """
    Base class for web socket notification using broker (redis or rabbitmq)
    """

    connection = None

    def __init__(self, url, exchange_name=None):
        self.url = url
        self.connect()
        self.exchange_name = exchange_name if exchange_name else celery_queue('socket_notification')
        self.channel = self.connection.channel()
        self.socket_exchange = Exchange(self.exchange_name, type='fanout', channel=self.channel)
        self.socket_exchange.declare()

    def open(self):
        """Test if connection is open.

        True if connected else false

        :return bool:
        """
        return self.connection and self.connection.connected

    def connect(self):
        self._close()
        logger.info('Connecting to broker {}'.format(self.url))
        self.connection = Connection(self.url, heartbeat=WS_HEART_BEAT)
        self.connection.connect()
        logger.info('Connected to broker {}'.format(self.url))

    def _close(self):
        if hasattr(self, 'connection') and self.connection:
            logger.info('Closing connection to broker {}'.format(self.url))
            self.connection.release()
            self.connection = None
            logger.info('Connection closed to broker {}'.format(self.url))

    def close(self):
        self._close()
Example 12
class SocketBrokerClient:
    """
    Base class for web socket notification using broker (redis or rabbitmq)

    """

    connection = None

    def __init__(self, url):
        self.url = url
        self.connect()
        self.channel = self.connection.channel()
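        # NOTE: exchange_name is not defined in this snippet; in the original
        # source it is presumably a module-level constant (cf. Example 11,
        # where it is passed in as a parameter instead).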
        self.socket_exchange = Exchange(exchange_name, type="fanout", channel=self.channel)
        self.socket_exchange.declare()

    def open(self):
        """
        True if connected else false
        :return bool:
        """
        return self.connection and self.connection.connected

    def connect(self):
        self._close()
        logger.info("Connecting to broker {}".format(self.url))
        self.connection = Connection(self.url)
        self.connection.connect()
        logger.info("Connected to broker {}".format(self.url))

    def _close(self):
        if hasattr(self, "connection") and self.connection:
            logger.info("Closing connecting to broker {}".format(self.url))
            self.connection.release()
            self.connection = None
            logger.info("Connection closed to broker {}".format(self.url))

    def close(self):
        self._close()
Example 13
class KWriteQueue(object):
    def __init__(self, channel, exchange, **kwargs):
        self._exchange_declare = kwargs.get("exchange_declare", False)
        if isinstance(exchange, Queue):
            self.exchange = exchange.exchange
        elif isinstance(exchange, basestring):
            self.exchange = Exchange(exchange, type="fanout")  # , durable=True)
        else:
            assert isinstance(exchange, Exchange)
            self.exchange = exchange
        self.channel = maybe_channel(channel)
        self.exchange.maybe_bind(self.channel)
        if self._exchange_declare:
            self.exchange.declare()

        self.producer = messaging.Producer(channel, self.exchange,
                                           serializer='json',
                                           routing_key='',
                                           compression=None,
                                           auto_declare=False)

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()

    def put(self, message, serializer=None, headers=None, compression=None,
            routing_key=None, **kwargs):
        self.producer.publish(message,
                              content_type="application/octet-stream",
                              serializer=serializer,
                              routing_key=routing_key,
                              headers=headers,
                              compression=compression,
                              **kwargs)
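A hypothetical usage sketch for this class (note that close(), called by
__exit__, is assumed to be defined elsewhere in the original source):

    with KWriteQueue(conn.channel(), 'events', exchange_declare=True) as q:
        q.put({'hello': 'world'}, serializer='json')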
Example 14
    def connect(self):
        if not self.connection:
            logging.info("Connecting to server %s", self.amqp_address)
            self.connection = self.create_connection()
        else:
            return

        self.channel = self.connection.channel()
        self.channel.basic_qos(0, self.prefetch_count, False)

        for qname, params in self.queues.iteritems():
            if "exchange" in params:
                exchange = Exchange(params["exchange"], **self.exchanges.get(params["exchange"], {}))
                exchange = exchange(self.channel)
                exchange.declare()
                self.declared_exchanges[params["exchange"]] = exchange

                queue_params = params.copy()
                del queue_params['exchange']
                self.declared_queues[qname] = Queue(qname, exchange=exchange, **queue_params)
            else:
                self.declared_queues[qname] = Queue(qname, **params)

            self.declared_queues[qname](self.channel).declare()
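A sketch of the configuration shape this method assumes (names are
illustrative, inferred from the attribute access above):

    self.exchanges = {'work': {'type': 'direct', 'durable': True}}
    self.queues = {'tasks': {'exchange': 'work', 'routing_key': 'tasks'}}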
Example 15
    def declare_exchange(self, name, type='direct', queues=None, **options):
        """Create or update exchange

        :param name: name of exchange
        :type name: str
        :param type: type of exchange - direct, fanout, topic, match
        :type type: str
        :param queues: list of queues with routing keys: [[queue_name, routing_key], [queue_name, routing_key], ...]
        :type queues: list, None or tuple
        :param options: additional options for Exchange creation
        """
        if queues is None:
            queues = []

        with connections[self.connection].acquire() as conn:
            exchange = Exchange(name, type=type, channel=conn, **options)
            exchange.declare()
            self.exchanges[name] = exchange
            for q_name, routing_key in queues:
                queue = Queue(name=q_name, channel=conn)
                queue.declare()
                queue.bind_to(exchange=name, routing_key=routing_key)
                self.logger.debug('Queue "%s" with routing_key "%s" was bond to exchange "%s"', q_name,
                                  routing_key if routing_key else q_name, name)
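A hypothetical call, assuming an object exposing this method whose
self.connection is a kombu Connection registered with the connections pool:

    self.declare_exchange('logs', type='topic',
                          queues=[['log_info', 'log.info'],
                                  ['log_errors', 'log.error']])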
Example 16
    def __init__(self, channel, exchange, **kwargs):
        self._exchange_declare = kwargs.get("exchange_declare", False)
        if isinstance(exchange, Queue):
            self.exchange = exchange.exchange
        elif isinstance(exchange, basestring):
            self.exchange = Exchange(exchange, type="fanout")  # , durable=True)
        else:
            assert isinstance(exchange, Exchange)
            self.exchange = exchange
        self.channel = maybe_channel(channel)
        self.exchange.maybe_bind(self.channel)
        if self._exchange_declare:
            self.exchange.declare()

        self.producer = messaging.Producer(channel, self.exchange,
                                           serializer='json',
                                           routing_key='',
                                           compression=None,
                                           auto_declare=False)
Example 17
    def _init_amqp(self):
        """Init AMQP objects after connection"""
        self.producer = self.connection.Producer()
        self.exchange = Exchange(
            self.exchange_name,
            channel=self.connection.channel(),
            type=self.exchange_type,
            durable=self.exchange_is_durable)

        self.queue = Queue(
            self.queue_name,
            self.exchange,
            channel=self.connection.channel(),
            durable=self.queue_is_durable,
            routing_key=self.routing_key,
            queue_arguments=self.queue_args)

        # We declare the objects to the broker; only this way can we
        # ensure we publish to an existing queue and routing_key.
        # This is how AMQP works, not a library principle.
        self.exchange.declare()
        self.queue.declare()
Example 18
class CeleryConfig:
    """
    Celery Configuration
    http://docs.celeryproject.org/en/latest/userguide/configuration.html
    """
    timezone = 'UTC'

    task_default_queue = 'celery'
    task_low_queue = 'low'
    task_med_queue = 'med'
    task_high_queue = 'high'

    low_pri_modules = {
        'framework.analytics.tasks',
        'framework.celery_tasks',
        'scripts.osfstorage.usage_audit',
        'scripts.stuck_registration_audit',
        'scripts.analytics.tasks',
        'scripts.populate_new_and_noteworthy_projects',
        'scripts.populate_popular_projects_and_registrations',
        'scripts.remind_draft_preregistrations',
        'website.search.elastic_search',
        'scripts.generate_sitemap',
        'scripts.generate_prereg_csv',
        'scripts.analytics.run_keen_summaries',
        'scripts.analytics.run_keen_snapshots',
        'scripts.analytics.run_keen_events',
    }

    med_pri_modules = {
        'framework.email.tasks',
        'scripts.send_queued_mails',
        'scripts.triggered_mails',
        'website.mailchimp_utils',
        'website.notifications.tasks',
    }

    high_pri_modules = {
        'scripts.approve_embargo_terminations',
        'scripts.approve_registrations',
        'scripts.embargo_registrations',
        'scripts.premigrate_created_modified',
        'scripts.refresh_addon_tokens',
        'scripts.retract_registrations',
        'website.archiver.tasks',
        'scripts.add_missing_identifiers_to_preprints'
    }

    try:
        from kombu import Queue, Exchange
    except ImportError:
        pass
    else:
        task_queues = (
            Queue(task_low_queue, Exchange(task_low_queue), routing_key=task_low_queue,
                consumer_arguments={'x-priority': -1}),
            Queue(task_default_queue, Exchange(task_default_queue), routing_key=task_default_queue,
                consumer_arguments={'x-priority': 0}),
            Queue(task_med_queue, Exchange(task_med_queue), routing_key=task_med_queue,
                consumer_arguments={'x-priority': 1}),
            Queue(task_high_queue, Exchange(task_high_queue), routing_key=task_high_queue,
                consumer_arguments={'x-priority': 10}),
        )

        task_default_exchange_type = 'direct'
        task_routes = ('framework.celery_tasks.routers.CeleryRouter', )
        task_ignore_result = True
        task_store_errors_even_if_ignored = True

    broker_url = os.environ.get('BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST))
    broker_use_ssl = False

    # Default RabbitMQ backend
    result_backend = 'django-db'  # django-celery-results

    beat_scheduler = 'django_celery_beat.schedulers:DatabaseScheduler'

    # Modules to import when celery launches
    imports = (
        'framework.celery_tasks',
        'framework.email.tasks',
        'website.mailchimp_utils',
        'website.notifications.tasks',
        'website.archiver.tasks',
        'website.search.search',
        'website.project.tasks',
        'scripts.populate_new_and_noteworthy_projects',
        'scripts.populate_popular_projects_and_registrations',
        'scripts.refresh_addon_tokens',
        'scripts.remind_draft_preregistrations',
        'scripts.retract_registrations',
        'scripts.embargo_registrations',
        'scripts.approve_registrations',
        'scripts.approve_embargo_terminations',
        'scripts.triggered_mails',
        'scripts.send_queued_mails',
        'scripts.analytics.run_keen_summaries',
        'scripts.analytics.run_keen_snapshots',
        'scripts.analytics.run_keen_events',
        'scripts.generate_sitemap',
        'scripts.premigrate_created_modified',
        'scripts.generate_prereg_csv',
        'scripts.add_missing_identifiers_to_preprints',
    )

    # Modules that need metrics and release requirements
    # imports += (
    #     'scripts.osfstorage.usage_audit',
    #     'scripts.stuck_registration_audit',
    #     'scripts.analytics.tasks',
    #     'scripts.analytics.upload',
    # )

    # celery.schedule will not be installed when running invoke requirements the first time.
    try:
        from celery.schedules import crontab
    except ImportError:
        pass
    else:
        #  Setting up a scheduler essentially replaces an independent cron job.
        # Note: these times must be in UTC
        beat_schedule = {
            '5-minute-emails': {
                'task': 'website.notifications.tasks.send_users_email',
                'schedule': crontab(minute='*/5'),
                'args': ('email_transactional',),
            },
            'daily-emails': {
                'task': 'website.notifications.tasks.send_users_email',
                'schedule': crontab(minute=0, hour=5),  # Daily at 12 a.m. EST
                'args': ('email_digest',),
            },
            'refresh_addons': {
                'task': 'scripts.refresh_addon_tokens',
                'schedule': crontab(minute=0, hour=7),  # Daily 2:00 a.m
                'kwargs': {'dry_run': False, 'addons': {
                    'box': 60,          # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens
                    'googledrive': 14,  # https://developers.google.com/identity/protocols/OAuth2#expiration
                    'mendeley': 14      # http://dev.mendeley.com/reference/topics/authorization_overview.html
                }},
            },
            'retract_registrations': {
                'task': 'scripts.retract_registrations',
                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                'kwargs': {'dry_run': False},
            },
            'embargo_registrations': {
                'task': 'scripts.embargo_registrations',
                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                'kwargs': {'dry_run': False},
            },
            'add_missing_identifiers_to_preprints': {
                'task': 'scripts.add_missing_identifiers_to_preprints',
                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                'kwargs': {'dry_run': False},
            },
            'approve_registrations': {
                'task': 'scripts.approve_registrations',
                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                'kwargs': {'dry_run': False},
            },
            'approve_embargo_terminations': {
                'task': 'scripts.approve_embargo_terminations',
                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                'kwargs': {'dry_run': False},
            },
            'triggered_mails': {
                'task': 'scripts.triggered_mails',
                'schedule': crontab(minute=0, hour=5),  # Daily 12 a.m
                'kwargs': {'dry_run': False},
            },
            'send_queued_mails': {
                'task': 'scripts.send_queued_mails',
                'schedule': crontab(minute=0, hour=17),  # Daily 12 p.m.
                'kwargs': {'dry_run': False},
            },
            'prereg_reminder': {
                'task': 'scripts.remind_draft_preregistrations',
                'schedule': crontab(minute=0, hour=12),  # Daily 12 p.m.
                'kwargs': {'dry_run': False},
            },
            'new-and-noteworthy': {
                'task': 'scripts.populate_new_and_noteworthy_projects',
                'schedule': crontab(minute=0, hour=7, day_of_week=6),  # Saturday 2:00 a.m.
                'kwargs': {'dry_run': False}
            },
            'update_popular_nodes': {
                'task': 'scripts.populate_popular_projects_and_registrations',
                'schedule': crontab(minute=0, hour=7),  # Daily 2:00 a.m.
                'kwargs': {'dry_run': False}
            },
            'run_keen_summaries': {
                'task': 'scripts.analytics.run_keen_summaries',
                'schedule': crontab(minute=0, hour=6),  # Daily 1:00 a.m.
                'kwargs': {'yesterday': True}
            },
            'run_keen_snapshots': {
                'task': 'scripts.analytics.run_keen_snapshots',
                'schedule': crontab(minute=0, hour=8),  # Daily 3:00 a.m.
            },
            'run_keen_events': {
                'task': 'scripts.analytics.run_keen_events',
                'schedule': crontab(minute=0, hour=9),  # Daily 4:00 a.m.
                'kwargs': {'yesterday': True}
            },
            'generate_sitemap': {
                'task': 'scripts.generate_sitemap',
                'schedule': crontab(minute=0, hour=5),  # Daily 12:00 a.m.
            },
            'generate_prereg_csv': {
                'task': 'scripts.generate_prereg_csv',
                'schedule': crontab(minute=0, hour=10, day_of_week=0),  # Sunday 5:00 a.m.
            },
        }
Example 19
from kombu import Connection, Exchange, Queue

media_exchange = Exchange('media', 'direct', durable=True)
video_queue = Queue('video', exchange=media_exchange, routing_key='video')


def process_media(body, message):
    print(body)
    message.ack()


# connections
broker = 'sqla+postgresql://chillaranand:f@localhost/mitron'
broker = 'amqp://*****:*****@localhost//'

with Connection(broker) as conn:
    # produce
    producer = conn.Producer(serializer='json')
    producer.publish({
        'name': '/tmp/lolcat1.avi',
        'size': 1301013
    },
                     exchange=media_exchange,
                     routing_key='video',
                     declare=[video_queue])

    # The declare above makes sure the video queue is declared
    # so that messages can be delivered.
    # It's best practice in Kombu to have both publishers and
    # consumers declare the queue. You can also declare the
    # queue manually, as sketched below:
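A minimal sketch of that manual declaration, using the same conn and
video_queue as above:

    bound_queue = video_queue(conn)  # bind the queue to the connection
    bound_queue.declare()            # declares the exchange, the queue, and the binding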
Example 20
def handle_message(body, message):
    print('Received message: %r' % (body, ))
    print('  properties:\n%s' % (pretty(message.properties), ))
    print('  delivery_info:\n%s' % (pretty(message.delivery_info), ))
    message.ack()

#: Create a connection and a channel.
#: If hostname, userid, password and virtual_host is not specified
#: the values below are the default, but listed here so it can
#: be easily changed.
with Connection('pyamqp://*****:*****@localhost:5672//') as connection:
    
    """The configuration of the message flow is as follows:
    gateway_kombu_exchange -> internal_kombu_exchange -> kombu_demo queue 
    """
    gateway_exchange = Exchange('gateway_kombu_demo', type='direct')
    exchange = Exchange('internal_kombu_demo', type='direct')
    bound = exchange.bind(connection.channel())
    bound.bind_to(gateway_exchange, routing_key='kombu_demo')
    
    queue = Queue('kombu_demo', exchange, routing_key='kombu_demo')
    
    #: Create consumer using our callback and queue.
    #: Second argument can also be a list to consume from
    #: any number of queues.
    with Consumer(connection, queue, callbacks=[handle_message]):

        #: This waits for a single event.  Note that this event may not
        #: be a message, or a message that is to be delivered to the consumers
        #: channel, but any event received on the connection.        
        recv = eventloop(connection)
        while True:
Example 21
@app.task
def test_queue_1():
    return 'queue1'


@app.task
def test_queue_2():
    return 'queue2'


# queue_1 and queue_2 are the message queue names.
# Exchange: an exchange instance; exchanges come in different types.
# routing_key: tells the exchange which queue to deliver the task message to.

queue = (Queue('queue_1',
               Exchange('Exchange1', type='direct'),
               routing_key='queue_1_key'),
         Queue('queue_2',
               Exchange('Exchange2', type='direct'),
               routing_key='queue_2_key'))

route = {
    'main.test_queue_1': {
        'queue': 'queue_1',
        'routing_key': 'queue_1_key'
    },
    'main.test_queue_2': {
        'queue': 'queue_2',
        'routing_key': 'queue_2_key'
    }
}
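A sketch of wiring these definitions into the Celery app (assuming `app` is
the Celery instance the tasks above are registered with):

    app.conf.task_queues = queue
    app.conf.task_routes = route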
Example 22
{toc}
"""

CSTRESS_QUEUE = os.environ.get('CSTRESS_QUEUE_NAME', 'c.stress')
CSTRESS_BACKEND = os.environ.get('CSTRESS_BACKEND', 'redis://')

app = Celery(
    'stress', broker='amqp://', backend=CSTRESS_BACKEND,
    set_as_current=False,
)
app.conf.update(
    CELERYD_PREFETCH_MULTIPLIER=10,
    CELERY_DEFAULT_QUEUE=CSTRESS_QUEUE,
    CELERY_QUEUES=(
        Queue(CSTRESS_QUEUE,
              exchange=Exchange(CSTRESS_QUEUE, durable=False),
              routing_key=CSTRESS_QUEUE,
              durable=False, auto_delete=True),
    ),
)


@app.task
def _marker(s, sep='-'):
    print('{0} {1} {2}'.format(sep * 3, s, sep * 3))


@app.task
def add(x, y):
    return x + y
Example 23
# mqvhost is generally / by default, mqport is generally 5672
# sample with variables:
# connString = 'amqp://{0}:{1}@{2}:{3}/{4}'.format(mqusername, mqpassword, mqservername, mqport, mqvhost)

# sample with hard-coded values.
connString = 'amqp://{0}:{1}@{2}:{3}/{4}'.format('guest', 'guest',
                                                 'servername', 5672, '/')

# ssl or not
mqConn = Connection(connString, ssl=False)

# Declare the Task Exchange for events
# delivery_mode=1 means transient (not persisted to disk), 2 means persistent.
# mozdef default exchange is: eventtask, routing key is also: eventtask
eventTaskExchange = Exchange(name='eventtask',
                             type='direct',
                             durable=True,
                             delivery_mode=1)
eventTaskExchange(mqConn).declare()
mqproducer = mqConn.Producer(serializer='json')

# make an event
event = dict()
# best practice is to send an ISO formatted timestamp
# so upstream can tell the source time zone
event['timestamp'] = pytz.timezone('UTC').localize(
    datetime.utcnow()).isoformat()
event['summary'] = 'just a test, only a test'
event['category'] = 'testing'
event['severity'] = 'INFO'
event['processid'] = os.getpid()
event['processname'] = sys.argv[0]
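The snippet ends before the event is sent; a plausible continuation, reusing
the producer and exchange declared above:

    mqproducer.publish(event,
                       exchange=eventTaskExchange,
                       routing_key='eventtask',
                       serializer='json')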
Example 24
CELERY_WORKER_HIJACK_ROOT_LOGGER = False

# Maximum task execution time: 20 minutes
CELERY_TASK_SOFT_TIME_LIMIT = 1200
CELERY_TASK_TIME_LIMIT = 1200

CELERY_TIMEZONE = TIME_ZONE
CELERY_TASK_DEFAULT_QUEUE = 'default'

CELERY_TASK_SERIALIZER = 'pickle'
CELERY_RESULT_SERIALIZER = 'pickle'
CELERY_ACCEPT_CONTENT = ['pickle', 'json']

# Define the execution queues
CELERY_TASK_QUEUES = (Queue('default',
                            Exchange('default'),
                            routing_key='default'),
                      Queue('crontab',
                            Exchange('crontab'),
                            routing_key='crontab'),
                      Queue('async', Exchange('async'), routing_key='async'))

# Route specific tasks to their designated queues
CELERY_TASK_ROUTES = {
    'meeting.celery._async_call': {
        'queue': 'async',
        'routing_key': 'async'
    },
}

CELERY_TASK_ANNOTATIONS = {'*': celery_annotations_dict}
Example 25
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import djcelery
from kombu import Queue, Exchange
djcelery.setup_loader()
BROKER_URL = 'redis://127.0.0.1:6379/0'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERY_IMPORTS = ("myapp.tasks", "myapp.include.scheduled",
                  "myapp.include.mon")
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('mysql_monitor', Exchange('monitor'), routing_key='monitor.mysql'),
)
CELERY_ROUTES = {
    'myapp.include.mon.mon_mysql': {
        'queue': 'mysql_monitor',
        'routing_key': 'monitor.mysql'
    },
    'myapp.include.mon.check_mysql_host': {
        'queue': 'mysql_monitor',
        'routing_key': 'monitor.mysql'
    },
    'myapp.include.mon.sendmail_monitor': {
        'queue': 'mysql_monitor',
        'routing_key': 'monitor.mysql'
    },
Example 26
 def test_set_passive_mode(self):
     exc = Exchange('foo', 'direct', passive=True)
     assert exc.passive
Example 27
from kombu import Connection, Exchange, Queue
import amqp.exceptions


def process_media(body, message):
    print '*'*10
    print body
    print '*'*10
    message.ack()

# Connection
conn = Connection('amqp://guest@localhost//')
channel = conn.channel()

media_exchange = Exchange('media', 'topic', channel=channel, durable=True)
video_queue = Queue('video', channel=channel, exchange=media_exchange, routing_key='video')
try:
    media_exchange.declare()
except amqp.exceptions.PreconditionFailed, e:
    # the exchange already exists with conflicting attributes
    print e
    exit()


# produce
producer = conn.Producer(serializer='json', auto_declare=False)
producer.publish('name',
                 exchange=media_exchange, routing_key='video',
                 declare=[video_queue])

    # # consume
Example 28
 def test_set_transient_delivery_mode(self):
     exc = Exchange('foo', 'direct', delivery_mode='transient')
     assert exc.delivery_mode == Exchange.TRANSIENT_DELIVERY_MODE
Example 29
CELERY_BROKER_URL = 'amqp://*****:*****@localhost:5672//'
CELERY_EVENT_QUEUE_TTL = 5
CELERY_TASK_DEFAULT_QUEUE = 'tower'
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_TRACK_STARTED = True
CELERY_TASK_TIME_LIMIT = None
CELERY_TASK_SOFT_TIME_LIMIT = None
CELERY_WORKER_POOL_RESTARTS = True
CELERY_BEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
CELERY_BEAT_MAX_LOOP_INTERVAL = 60
CELERY_RESULT_BACKEND = 'django-db'
CELERY_IMPORTS = ('awx.main.scheduler.tasks', )
CELERY_TASK_QUEUES = (Queue('default',
                            Exchange('default'),
                            routing_key='default'),
                      Queue('tower', Exchange('tower'), routing_key='tower'),
                      Queue('tower_scheduler',
                            Exchange('scheduler', type='topic'),
                            routing_key='tower_scheduler.job.#',
                            durable=False), Broadcast('tower_broadcast_all'))
CELERY_TASK_ROUTES = {
    'awx.main.scheduler.tasks.run_task_manager': {
        'queue': 'tower',
        'routing_key': 'tower'
    },
    'awx.main.scheduler.tasks.run_job_launch': {
        'queue': 'tower_scheduler',
        'routing_key': 'tower_scheduler.job.launch'
    },
Example 30
RABBITREDIS_HOSTNAME = os.environ.get('RABBITREDIS_PORT_6379_TCP_ADDR',
                                      'localhost')
RABBITREDIS_PORT = int(os.environ.get('RABBITREDIS_PORT_6379_TCP_PORT', 6379))

# https://github.com/celery/celery/issues/1909 describes the tradeoffs of redis and rabbitmq for results backend
CELERY_RESULT_BACKEND = 'redis://{hostname}:{port}/{db}/'.format(
    hostname=RABBITREDIS_HOSTNAME,
    port=RABBITREDIS_PORT,
    db=os.environ.get('RABBITREDIS_DB', '0'))
# CELERY_RESULT_BACKEND = BROKER_URL
CELERY_TASK_RESULT_EXPIRES = 60
CELERY_TASK_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']  # Ignore other content
CELERY_RESULT_SERIALIZER = 'json'
CELERYD_PREFETCH_MULTIPLIER = 2

from kombu import Exchange, Queue
exchange_name = 'sync@{hostname}'.format(
    hostname=os.environ.get('HOSTNAME', 'localhost'))
sync_exchange = Exchange(name=exchange_name, type='fanout')

CELERY_SYNC_WORKER_COUNT = int(os.environ.get('CELERY_SYNC_WORKER_COUNT', 1))
all_queues = ()
for i in range(1, CELERY_SYNC_WORKER_COUNT + 1):
    queue_name = 'sync_queue_{worker_number}@{hostname}'.format(
        worker_number=i, hostname=os.environ.get('HOSTNAME', 'localhost'))
    all_queues += (Queue(name=queue_name, exchange=sync_exchange), )

CELERY_QUEUES = all_queues
Example 31
class Publisher(KombuConfReader):

    def __init__(self, config):
        self._log = log.getLogger()
        KombuConfReader.__init__(self, config)

        self.connection = Connection(self.broker_url)
        try:
            self._init_amqp()
        except Exception as exc:
            self._log.error('Publisher fail in init connection: %s' % exc)
            raise

    def _init_amqp(self):
        """Init AMQP objects after connection"""
        self.producer = self.connection.Producer()
        self.exchange = Exchange(
            self.exchange_name,
            channel=self.connection.channel(),
            type=self.exchange_type,
            durable=self.exchange_is_durable)

        self.queue = Queue(
            self.queue_name,
            self.exchange,
            channel=self.connection.channel(),
            durable=self.queue_is_durable,
            routing_key=self.routing_key,
            queue_arguments=self.queue_args)

        # We declare the objects to the broker; only this way can we
        # ensure we publish to an existing queue and routing_key.
        # This is how AMQP works, not a library principle.
        self.exchange.declare()
        self.queue.declare()

    def switch_connection(self):
        """Switch AMQP connection from url to backup_url and vice versa"""
        self._log.warn('Switching AMQP connection from %s' %
                       self.connection.as_uri())
        if (self.connection.hostname in self.broker_url
                and self.broker_backup_url):
            self.connection = Connection(self.broker_backup_url)
        elif self.connection.hostname in self.broker_backup_url:
            self.connection = Connection(self.broker_url)
        else:
            raise URLError('Invalid current URI to switch connection : %s' %
                           self.connection.as_uri())
        self._init_amqp()

    def _publish(self, msg):
        """Publish message ensuring connection is available"""
        publish = self.connection.ensure(
            self.producer, self.producer.publish, max_retries=3)

        publish(msg, exchange=self.exchange,
                routing_key=self.routing_key,
                serializer=self.serializer,
                compression=self.compression)

        return True

    def publish(self, msg):
        """Return True if the message was published successfully, else False."""
        try:
            return self._publish(msg)
        except Exception:
            try:
                self.switch_connection()
                return self._publish(msg)
            except Exception as exc:
                self._log.error('Publish failed after switching connection: %s' %
                                exc)
            return False
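A hypothetical usage sketch (the config argument follows whatever
KombuConfReader expects in this project):

    publisher = Publisher(config)
    if not publisher.publish({'event': 'ping'}):
        log.getLogger().error('message lost after failover attempt')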
Example 32
 class Test1:
     data: int
     event = Event(Exchange(), 'test_register_and_run')
Example 33
 def test_set_persistent_delivery_mode(self):
     exc = Exchange('foo', 'direct', delivery_mode='persistent')
     assert exc.delivery_mode == Exchange.PERSISTENT_DELIVERY_MODE
Example 34
    'MED': {
        'name': 'med',
        'priority': 20,
        'modules': {'share.tasks.DisambiguatorTask', },
    },
    'HIGH': {
        'name': 'high',
        'priority': 30,
        'modules': {'share.tasks.BotTask', },
    },
}

CELERY_QUEUES = tuple(
    Queue(
        v['name'],
        Exchange(v['name']),
        routing_key=v['name'],
        consumer_arguments={'x-priority': v['priority']}
    ) for v in QUEUES.values()
)

CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
CELERY_ROUTES = ('share.celery.CeleryRouter', )
CELERY_IGNORE_RESULT = True
CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

# Logging
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'WARNING').upper()

LOGGING = {
Example 35
import time
import datetime
import threading

from twisted.internet import protocol, reactor, defer
from kombu import Exchange, Queue, Consumer, Connection
from kombu.messaging import Producer
from kombu.transport.base import Message
from kombu.common import eventloop, drain_consumer
from kombu.async import Hub

connection = Connection('amqp://*****:*****@localhost//')
channel = connection.channel()

# Define an exchange
task_exchange = Exchange('tasks', channel=channel, type='topic', durable=False)
task_exchange.declare()

# Bind the queue to the exchange here, specifying the queue's routing_key
task_queue = Queue('piap', task_exchange, channel=channel,
                   routing_key='suo_piao.#', durable=False)
task_queue2 = Queue('piap2.*abc.#', task_exchange, channel=channel,
                    routing_key='suo_piao.#', durable=False)
task_queue3 = Queue('piap3', task_exchange, channel=channel,
                    routing_key='suo_piao.abc.#', durable=False)
task_queue4 = Queue('piap4', task_exchange, channel=channel,
                    routing_key='abc.#', durable=False)
task_queues = []
for x in xrange(1,10):
  tmpQueue = Queue('testFlood'+str(x), task_exchange, channel=channel,
                   routing_key='abc.*.'+str(x), durable=False)
Example 36
 def test_bind_at_instantiation(self):
     assert Exchange('foo', channel=get_conn().channel()).is_bound
Example 37
        password=os.environ.get('RABBIT_ENV_RABBITMQ_PASS', 'mypass'),
        hostname=RABBIT_HOSTNAME,
        vhost=os.environ.get('RABBIT_ENV_VHOST', ''))
# We don't want to have dead connections stored on rabbitmq, so we have to negotiate using heartbeats
BROKER_HEARTBEAT = '?heartbeat=30'
if not BROKER_URL.endswith(BROKER_HEARTBEAT):
    BROKER_URL += BROKER_HEARTBEAT

BROKER_POOL_LIMIT = 1
BROKER_CONNECTION_TIMEOUT = 10

# Celery Config
CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (Queue(
    'default',
    Exchange('default'),
    routing_key='default',
), )

# Sensible settings for celery
CELERY_ALWAYS_EAGER = False
CELERY_ACKS_LATE = True
CELERY_TASK_PUBLISH_RETRY = True
CELERY_DISABLE_RATE_LIMITS = False

# By default we will ignore result
# If you want to see results and try out tasks interactively, change to False

# Or change this setting on tasks level
CELERY_IGNORE_RESULT = True
CELERY_SEND_TASK_ERROR_EMAILS = False
Example 38
 def test_create_message(self):
     chan = get_conn().channel()
     Exchange('foo', channel=chan).Message({'foo': 'bar'})
     assert 'prepare_message' in chan
Example 39
 class Test3:
     data: int
     event = Event(Exchange(), 'test_no_register')
Example 40
#: This is the callback applied when a message is received.
def handle_message(body, message):
    print('Received message: %r' % (body, ))
    print('  properties:\n%s' % (pretty(message.properties), ))
    print('  delivery_info:\n%s' % (pretty(message.delivery_info), ))
    message.ack()

#: Create a connection and a channel.
#: If hostname, userid, password and virtual_host is not specified
#: the values below are the default, but listed here so it can
#: be easily changed.
with Connection('pyamqp://*****:*****@localhost:5672//') as connection:
    # The configuration of the message flow is as follows:
    #   gateway_kombu_exchange -> internal_kombu_exchange -> kombu_demo queue
    gateway_exchange = Exchange('gateway_kombu_demo')(connection)
    exchange = Exchange('internal_kombu_demo')(connection)
    gateway_exchange.declare()
    exchange.declare()
    exchange.bind_to(gateway_exchange, routing_key='kombu_demo')

    queue = Queue('kombu_demo', exchange, routing_key='kombu_demo')

    #: Create consumer using our callback and queue.
    #: Second argument can also be a list to consume from
    #: any number of queues.
    with Consumer(connection, queue, callbacks=[handle_message]):

        #: This waits for a single event.  Note that this event may not
        #: be a message, or a message that is to be delivered to the consumers
        #: channel, but any event received on the connection.
Example 41
 class Test:
     data: int
     event = Event(Exchange(), '')
Example 42
 def __init__(self, url):
     self.url = url
     self.connect()
     self.channel = self.connection.channel()
     self.socket_exchange = Exchange(exchange_name, type="fanout", channel=self.channel)
     self.socket_exchange.declare()
Example 43
from kombu import Exchange, Queue

task_exchange = Exchange("tasks", type="direct")
task_queues = [
    Queue("hipri", task_exchange, routing_key="hipri"),
    Queue("midpri", task_exchange, routing_key="midpri"),
    Queue("lopri", task_exchange, routing_key="lopri")
]
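With a direct exchange, the producer's routing_key must match a queue's
binding key exactly; a sketch of publishing to the hipri queue defined above
(broker URL is illustrative):

    from kombu import Connection

    with Connection('amqp://localhost//') as conn:
        producer = conn.Producer(serializer='json')
        producer.publish({'task': 'urgent'},
                         exchange=task_exchange,
                         routing_key='hipri',
                         declare=task_queues)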
Example 44
 def test_repr_with_bindings(self):
     ex = Exchange('foo')
     x = Queue('foo', bindings=[ex.binding('A'), ex.binding('B')])
     self.assertTrue(repr(x))
Example 45
File: filr.py Project: sp00/filr
import boto
from kombu import BrokerConnection, Exchange, Queue, Consumer

connection = BrokerConnection()
connection.connect()

channel = connection.channel()
exchange = Exchange(name="android", type="fanout", channel=channel, durable=True)
exchange.declare()

channel = connection.channel()
queue = Queue(name="filr", exchange=exchange, durable=True, auto_delete=False, channel=channel, routing_key="filr")
queue.declare()


def fetch(b, m):
    print b, m


consumer = Consumer(channel=connection.channel(), queues=queue, auto_declare=False, callbacks=[fetch])
consumer.consume(no_ack=False)

while True:
    connection.drain_events()
    pass

# execfile('.private-settings')

# sdb = boto.connect_sdb(key_id, sec_key)
# domain = sdb.create_domain('android')
# item = domain.new_item('kral_step1')
Example 46
    def ready(self):
        super(CheckoutsApp, self).ready()

        APIEndPoint(app=self, version_string='1')

        Document.add_to_class(
            'check_in',
            lambda document, user=None: DocumentCheckout.objects.check_in_document(document, user)
        )
        Document.add_to_class(
            'checkout_info',
            lambda document: DocumentCheckout.objects.document_checkout_info(
                document
            )
        )
        Document.add_to_class(
            'checkout_state',
            lambda document: DocumentCheckout.objects.document_checkout_state(
                document
            )
        )
        Document.add_to_class(
            'is_checked_out',
            lambda document: DocumentCheckout.objects.is_document_checked_out(
                document
            )
        )

        ModelPermission.register(
            model=Document, permissions=(
                permission_document_checkout,
                permission_document_checkin,
                permission_document_checkin_override,
            )
        )

        app.conf.CELERYBEAT_SCHEDULE.update(
            {
                'task_check_expired_check_outs': {
                    'task': 'checkouts.tasks.task_check_expired_check_outs',
                    'schedule': timedelta(
                        seconds=CHECK_EXPIRED_CHECK_OUTS_INTERVAL
                    ),
                },
            }
        )

        app.conf.CELERY_QUEUES.append(
            Queue(
                'checkouts_periodic', Exchange('checkouts_periodic'),
                routing_key='checkouts_periodic', delivery_mode=1
            ),
        )

        app.conf.CELERY_ROUTES.update(
            {
                'checkouts.tasks.task_check_expired_check_outs': {
                    'queue': 'checkouts_periodic'
                },
            }
        )

        menu_facet.bind_links(links=(link_checkout_info,), sources=(Document,))
        menu_main.bind_links(links=(link_checkout_list,))
        menu_sidebar.bind_links(
            links=(link_checkout_document, link_checkin_document),
            sources=(
                'checkouts:checkout_info', 'checkouts:checkout_document',
                'checkouts:checkin_document'
            )
        )
Example 47
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERY_TASK_RESULT_EXPIRES = 60 * 60 * 24
CELERYD_MAX_TASKS_PER_CHILD = 40
CELERY_TRACK_STARTED = True
CELERY_ENABLE_UTC = False
CELERY_TIMEZONE = 'Asia/Shanghai'
platforms.C_FORCE_ROOT = True

#celery route config
CELERY_IMPORTS = ("OpsManage.tasks.assets", "OpsManage.tasks.ansible",
                  "OpsManage.tasks.cron", "OpsManage.tasks.deploy",
                  "OpsManage.tasks.sql", "OpsManage.tasks.sched")
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('ansible', Exchange('ansible'), routing_key='ansible_#'),
)
CELERY_ROUTES = {
    'OpsManage.tasks.sql.*': {
        'queue': 'default',
        'routing_key': 'default'
    },
    'OpsManage.tasks.assets.*': {
        'queue': 'default',
        'routing_key': 'default'
    },
    'OpsManage.tasks.cron.*': {
        'queue': 'default',
        'routing_key': 'default'
    },
Example 48
from importlib import import_module
import logging
import time
from zentral.conf import settings
from kombu import Connection, Consumer, Exchange, Queue
from kombu.mixins import ConsumerMixin, ConsumerProducerMixin
from kombu.pools import producers
from zentral.utils.json import save_dead_letter

logger = logging.getLogger('zentral.core.queues.backends.kombu')

raw_events_exchange = Exchange('raw_events', type='direct', durable=True)

events_exchange = Exchange('events', type="fanout", durable=True)
enrich_events_queue = Queue('enrich_events',
                            exchange=events_exchange,
                            durable=True)
enriched_events_exchange = Exchange('enriched_events',
                                    type="fanout",
                                    durable=True)
process_events_queue = Queue('process_events',
                             exchange=enriched_events_exchange,
                             durable=True)


class BaseWorker:
    name = "UNDEFINED"
    counters = []

    def setup_metrics_exporter(self, *args, **kwargs):
        self.log_info("run")
from kombu import Connection, Exchange, Producer, Queue, Consumer
rabbit_url = "redis://localhost:6379/"
print("rabbit_url: ", rabbit_url)
conn = Connection(rabbit_url)
print("conn: ", conn)

channel = conn.channel()
print("channel: ", channel)

exchange = Exchange("example-exchange", type="direct")
print("exchange: ", exchange)

producer = Producer(exchange=exchange, channel=channel, routing_key='BOB')
print("producer: ", producer)

queue = Queue(name="example-queue", exchange=exchange, routing_key='BOB')
print("queue: ", queue, "\n", queue.maybe_bind(conn), queue.declare())
queue.maybe_bind(conn)
queue.declare()

producer.publish("Hello there")
print("msg: ", producer.publish("Hello there"))

def process_body(body, message):
    print("Message: ", body)
    message.ack()


with Consumer(conn, queues=queue, callbacks=[process_body], accept=["text/plain"]):
    print("consumer: ", conn)
    conn.drain_events(timeout=2)
Example 50
 def test_bind_to_by_name(self):
     chan = get_conn().channel()
     foo = Exchange('foo', 'topic')
     foo(chan).bind_to('bar')
     assert 'exchange_bind' in chan
Example 51
 def test_publish(self):
     chan = get_conn().channel()
     Exchange('foo', channel=chan).publish('the quick brown fox')
     assert 'basic_publish' in chan
Example 52
import time
from copy import copy

from kombu import Exchange

__all__ = [
    'Event',
    'event_exchange',
    'get_exchange',
    'group_from',
]

#: Exchange used to send events on.
#: Note: Use :func:`get_exchange` instead, as the type of
#: exchange will vary depending on the broker connection.
event_exchange = Exchange('celeryev', type='topic')


def Event(type, _fields=None, __dict__=dict, __now__=time.time, **fields):
    """Create an event.

    Notes:
        An event is simply a dictionary: the only required field is ``type``.
        A ``timestamp`` field will be set to the current time if not provided.
    """
    event = __dict__(_fields, **fields) if _fields else fields
    if 'timestamp' not in event:
        event.update(timestamp=__now__(), type=type)
    else:
        event['type'] = type
    return event
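For reference, a sketch of what get_exchange might look like given the note
above (details vary across Celery versions; treat this as illustrative, and
note it reuses the copy import at the top of the module):

    def get_exchange(conn):
        ex = copy(event_exchange)
        if conn.transport.driver_type == 'redis':
            # Redis has no topic exchange, so fall back to fanout.
            ex.type = 'fanout'
        return ex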
Example 53
 def test_delete(self):
     chan = get_conn().channel()
     Exchange('foo', channel=chan).delete()
     assert 'exchange_delete' in chan
Example 54
 def test__repr__(self):
     b = Exchange('foo', 'topic')
     assert 'foo(topic)' in repr(b)
     assert 'Exchange' in repr(b)
Example 55
 def _declare_exchange(self, name, type, retry=False, retry_policy={}):
     ex = Exchange(name, type=type, durable=self.durable,
                   auto_delete=self.auto_delete)(self.channel)
     if retry:
         return self.connection.ensure(ex, ex.declare, **retry_policy)
     return ex.declare()
Example 56
def get_rpc_exchange(container):
    exchange_name = container.config.get(RPC_EXCHANGE_CONFIG_KEY, 'nameko-rpc')
    exchange = Exchange(exchange_name, durable=True, type="topic")
    return exchange
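A hypothetical usage sketch (maybe_declare is kombu's idempotent declaration
helper; container and channel are assumed to come from the surrounding
framework):

    from kombu.common import maybe_declare

    exchange = get_rpc_exchange(container)
    maybe_declare(exchange, channel)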