def _send_message(connection, routing_key, data, envelope):
    """Publish *data* to the configured exchange, optionally wrapped in an
    envelope carrying routing metadata.

    connection  -- open broker connection to publish over
    routing_key -- topic routing key for the message
    data        -- message payload
    envelope    -- when truthy, wrap payload with a "_meta" metadata dict
    """
    if envelope:
        # Envelope format: payload plus metadata describing how it was sent.
        message = {
            "payload": data,
            "_meta": {
                "exchange": CONF["MSG_EXCHANGE"],
                "routing_key": routing_key,
                "sent": mercurial.util.datestr(None, CONF["DATE_FORMAT"]),
                # TODO: Support more than just JSON
                "serializer": "json",
            },
        }
    else:
        message = data

    # Set up our broker publisher
    publisher = Publisher(
        connection=connection,
        exchange=CONF["MSG_EXCHANGE"],
        exchange_type="topic",
        routing_key=routing_key,
    )
    # Send the message
    # TODO: Support more than just JSON
    publisher.send(message)
    # Close the publishing connection
    publisher.close()
def cast(msg, event_type, topic, priority):
    """Publish a notification event to the rabbit broker.

    msg        -- event payload
    event_type -- notification event type string
    topic      -- routing key prefix
    priority   -- message priority; also used as the routing key suffix
    """
    yagi.config.setup(config_path="/etc/yagi.conf")
    conf = yagi.config.config_with("rabbit_broker")
    host = conf("host")
    port = conf("port")
    user = conf("user")
    exchange = conf("exchange")
    password = conf("password")
    vhost = conf("vhost")
    message_dict = {
        "message_id": str(uuid.uuid4()),
        "event_type": event_type,
        "publisher_id": "some_publisher",
        "priority": priority,
        "timestamp": str(datetime.datetime.utcnow()),
        "payload": msg,
    }
    conn = BrokerConnection(hostname=host, port=port, userid=user,
                            password=password, virtual_host=vhost)
    try:
        publisher = Publisher(
            connection=conn,
            exchange=exchange,
            routing_key="%s.%s" % (topic, priority),
            durable=False,
            exchange_type="topic",
        )
        publisher.send(message_dict)
        publisher.close()
    finally:
        # BUG FIX: the broker connection was never closed, leaking a socket
        # per call (compare reply()/send_msg() elsewhere in this file).
        conn.close()
def main():
    """Read lines from stdin and publish each as a message on the given exchange."""
    logging.basicConfig(level=logging.DEBUG)
    args = parser.parse_args()
    broker = BrokerConnection(
        hostname=args.hostname,
        virtual_host=args.vhost,
        userid=args.user,
        password=args.password,
    )
    # auto_declare is off so the exchange can be declared explicitly below.
    publisher = Publisher(auto_declare=False,
                          connection=broker,
                          exchange=args.exchange,
                          routing_key=args.key)
    logging.info("Declaring exchange: %s" % args.exchange)
    publisher.backend.exchange_declare(exchange=args.exchange,
                                       type="topic",
                                       durable=False,
                                       auto_delete=False)
    # readline() yields "" at EOF, which terminates the loop.
    for line in iter(sys.stdin.readline, ""):
        text = line.strip()
        logging.debug("Sending message '%s'" % text)
        publisher.send(text)
    publisher.close()
def publish(self, exchange, routing_key, message, auto_declare=False, persistent=True):
    """publish a message to exchange using routing_key

    exchange     - name of exchange
    routing_key  - interpretation of routing key depends on exchange type
    message      - message content to send
    auto_declare - automatically declare the exchange (default: false)
    persistent   - store message on disk as well as memory (default: True)
    """
    # AMQP delivery mode: 2 = persistent, 1 = transient.
    delivery_mode = 2 if persistent else 1
    publisher = Publisher(connection=self.broker,
                          exchange=exchange,
                          routing_key=routing_key,
                          auto_declare=auto_declare)
    publisher.send(message, delivery_mode=delivery_mode)
    publisher.close()
def cast(msg, event_type, topic, priority):
    """Publish a notification event to the rabbit broker.

    msg        -- event payload
    event_type -- notification event type string
    topic      -- routing key prefix
    priority   -- message priority; also the routing key suffix
    """
    yagi.config.setup(config_path='/etc/yagi.conf')
    conf = yagi.config.config_with('rabbit_broker')
    host = conf('host')
    port = conf('port')
    user = conf('user')
    exchange = conf('exchange')
    password = conf('password')
    vhost = conf('vhost')
    message_dict = {
        'message_id': str(uuid.uuid4()),
        'event_type': event_type,
        'publisher_id': 'some_publisher',
        'priority': priority,
        'timestamp': str(datetime.datetime.utcnow()),
        'payload': msg
    }
    conn = BrokerConnection(hostname=host, port=port, userid=user,
                            password=password, virtual_host=vhost)
    try:
        publisher = Publisher(connection=conn,
                              exchange=exchange,
                              routing_key="%s.%s" % (topic, priority),
                              durable=False,
                              exchange_type='topic')
        publisher.send(message_dict)
        publisher.close()
    finally:
        # BUG FIX: the broker connection was never closed (socket leak per call).
        conn.close()
def return_response(self, response, qid):
    """AMQP "response" handler.

    Publishes a response message to a temporary queue.
    - response is json, qid is string
    """
    message = json.dumps(response)
    routing_key = self.response_routing_key % qid
    logging.debug("response to %s with routing_key: %s, message: \n%s"
                  % (self.exchange_name, routing_key, message))
    try:
        publisher = Publisher(
            connection=self.amqp_connection,
            exchange=self.exchange_name,
            exchange_type="topic",
            routing_key=routing_key,
        )
        publisher.send(message)
        publisher.close()
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt.  Trying to send with broken connection: handle
        # gracefully by waiting for the connection and publishing again.
        logging.error('%s AMQP error: %s' % (self.service_name, sys.exc_info()[1]))
        self._wait_connection(5)
        self.return_response(response, qid)
def _send_message(routing_key, request):
    """Pickle *request* and publish it on the 'request' topic exchange."""
    connection = DjangoAMQPConnection()
    publisher = Publisher(
        connection=connection,
        exchange='request',
        exchange_type='topic',
        routing_key=routing_key,
        serializer='pickle',
    )
    publisher.send(request)
    publisher.close()
    connection.close()
def reply(routing_key, data):
    """Publish *data* on the "django" topic exchange under *routing_key*."""
    from carrot.connection import DjangoBrokerConnection
    from carrot.messaging import Publisher

    connection = DjangoBrokerConnection()
    publisher = Publisher(connection=connection,
                          exchange="django",
                          routing_key=routing_key,
                          exchange_type="topic")
    publisher.send(data)
    publisher.close()
    connection.close()
def sender_callback(self, routing_key, data): conn = DjangoBrokerConnection() publisher = Publisher(connection=conn, exchange="django_send", routing_key=routing_key, exchange_type="topic", ) publisher.send(data) publisher.close() conn.close() print "Sent object change/delete message for %s" % routing_key
def test_amqp(self):
    """Smoke test: publish a single "test" message to the collector exchange."""
    from carrot.connection import DjangoBrokerConnection
    from carrot.messaging import Publisher, Consumer

    broker = DjangoBrokerConnection()
    publisher = Publisher(connection=broker,
                          exchange="collector",
                          exchange_type='topic',
                          routing_key="collector.driver",
                          serializer='json')
    publisher.send("test")
    publisher.close()
    broker.close()
def send_msg(routing_key, data):
    """Publish *data* on the "django_send" topic exchange.

    Broker parameters come from the Django settings module.
    """
    connection = BrokerConnection(hostname=settings.BROKER_HOST,
                                  port=settings.BROKER_PORT,
                                  userid=settings.BROKER_USER,
                                  password=settings.BROKER_PASSWORD,
                                  virtual_host=settings.BROKER_VHOST)
    publisher = Publisher(connection=connection,
                          exchange="django_send",
                          routing_key=routing_key,
                          exchange_type="topic")
    publisher.send(data)
    publisher.close()
    connection.close()
def send_requests(requests, **options):
    """Send an import request message to be picked up by workers.

    requests -- iterable of request dicts, each carrying its own
                'driver_routing_key'
    options  -- may contain 'logger'; defaults to the logging module
    """
    # BUG FIX: the docstring used to sit *after* the logger setup, making it a
    # no-op string statement rather than the function's docstring.
    logger = logging
    if 'logger' in options:
        logger = options['logger']
    connection = DjangoBrokerConnection()
    publisher = Publisher(connection=connection,
                          exchange="collector",
                          exchange_type='topic',
                          routing_key="collector.driver",
                          serializer='json')
    for req in requests:
        routing_key = req['driver_routing_key']
        # Per-request routing key overrides the publisher default.
        publisher.send(req, routing_key=routing_key)
        logger.debug("Sent request with routing_key %s:%s" % (routing_key, req,))
    publisher.close()
    connection.close()
def main():
    """Publish one hundred numbered test messages to the "messages" exchange."""
    broker = BrokerConnection(hostname="localhost",
                              port=5672,
                              userid="test",
                              password="******",
                              virtual_host="test.com")
    publisher = Publisher(connection=broker,
                          exchange="messages",
                          routing_key="awesome")
    # Each message is a dict carrying the loop counter.
    for n in xrange(100):
        publisher.send({"a": n})
    publisher.close()
class RabbitMQPipeline(object):
    """Scrapy item pipeline that pushes serialized items onto a RabbitMQ queue."""

    def __init__(self, hostname, port, user_id, password, virtual_host, encoder_class):
        self.queue_connection = BrokerConnection(hostname=hostname,
                                                 port=port,
                                                 userid=user_id,
                                                 password=password,
                                                 virtual_host=virtual_host)
        self.encoder = encoder_class()
        # Setup / Teardown Rabbit plumbing when spider opens / closes
        dispatcher.connect(self.spider_opened, signals.spider_opened)
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    @classmethod
    def from_settings(cls, settings):
        """Alternate constructor reading broker parameters from Scrapy settings."""
        return cls(settings.get('BROKER_HOST'),
                   settings.get('BROKER_PORT'),
                   settings.get('BROKER_USERID'),
                   settings.get('BROKER_PASSWORD'),
                   settings.get('BROKER_VIRTUAL_HOST'),
                   settings.get('QUEUE_SERIALIZER', ScrapyJSONEncoder))

    def spider_opened(self, spider):
        # The default ('') exchange routes directly to the queue named by the
        # routing key, i.e. one queue per spider.
        self.publisher = Publisher(connection=self.queue_connection,
                                   exchange='',
                                   routing_key=spider.name)

    def spider_closed(self, spider):
        self.publisher.close()

    def process_item(self, item, spider):
        # Publishing is blocking I/O; keep it off the Twisted reactor thread.
        return deferToThread(self._process_item, item, spider)

    def _process_item(self, item, spider):
        self.publisher.send(self.encoder.encode(dict(item)))
        return item
class MessageQueuePipeline(object):
    """Scrapy item pipeline publishing scraped items to a message queue."""

    def __init__(self, host_name, port, userid, password, virtual_host, encoder_class):
        self.q_connection = BrokerConnection(hostname=host_name, port=port,
                                             userid=userid, password=password,
                                             virtual_host=virtual_host)
        self.encoder = encoder_class()

    @classmethod
    def from_settings(cls, settings):
        """Alternate constructor reading broker parameters from Scrapy settings."""
        host_name = settings.get('BROKER_HOST', 'localhost')
        port = settings.get('BROKER_PORT', 5672)
        userid = settings.get('BROKER_USERID', "guest")
        password = settings.get('BROKER_PASSWORD', "guest")
        virtual_host = settings.get('BROKER_VIRTUAL_HOST', "/")
        encoder_class = settings.get('MESSAGE_Q_SERIALIZER', ScrapyJSONEncoder)
        return cls(host_name, port, userid, password, virtual_host, encoder_class)

    @classmethod
    def from_crawler(cls, crawler):
        # BUG FIX: previously called cls(crawler), which raises TypeError since
        # __init__ takes six broker parameters; build via from_settings instead.
        o = cls.from_settings(crawler.settings)
        crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
        return o

    def spider_opened(self, spider):
        # The default ("") exchange routes directly to the queue named by the key.
        self.publisher = Publisher(connection=self.q_connection,
                                   exchange="",
                                   routing_key=spider.name)

    def spider_closed(self, spider):
        self.publisher.close()

    def process_item(self, item, spider):
        # Publishing is blocking I/O; run it in a thread off the reactor.
        return deferToThread(self._process_item, item, spider)

    def _process_item(self, item, spider):
        self.publisher.send({"scraped_data": self.encoder.encode(dict(item))})
        return item
def send_requests(requests, **options):
    """Send an import request message to be picked up by workers.

    requests -- iterable of request dicts, each with a 'driver_routing_key'
    options  -- optional 'logger' entry; defaults to the logging module
    """
    # BUG FIX: the docstring previously appeared after the logger setup, so it
    # was an inert string statement instead of the function's docstring.
    logger = logging
    if 'logger' in options:
        logger = options['logger']
    connection = DjangoBrokerConnection()
    publisher = Publisher(connection=connection,
                          exchange="collector",
                          exchange_type='topic',
                          routing_key="collector.driver",
                          serializer='json')
    for req in requests:
        routing_key = req['driver_routing_key']
        # Per-request routing key overrides the publisher default.
        publisher.send(req, routing_key=routing_key)
        logger.debug("Sent request with routing_key %s:%s" % (
            routing_key,
            req,
        ))
    publisher.close()
    connection.close()
def _dispatch_message(self, message):
    """Attempt to claim and publish a deferred message.

    Returns True when the message was sent (and deleted from the DB),
    False when dispatch failed and the message was rescheduled, and
    None when the DB claim could not be taken.
    """
    if not message.claim(self.context.db):
        return
    try:
        publisher = Publisher(self.context.broker,
                              exchange=message.options.exchange,
                              exchange_type=message.options.exchange_type)
        publisher.send(message.message,
                       routing_key=message.options.routing_key,
                       delivery_mode=message.options.delivery_mode,
                       mandatory=message.options.mandatory,
                       priority=message.options.priority)
        publisher.close()
    except Exception:
        # BUG FIX: was a bare "except:" that also trapped SystemExit /
        # KeyboardInterrupt; reschedule only on real errors.
        log.error("Error dispatching deferred message %s: %s"
                  % (message, traceback.format_exc()))
        self.error_reschedule(message)
        return False
    else:
        log.debug("Dispatched message %s" % message)
        # sent with no problems, done with it.
        self.context.db.delete(message)
        return True
def main():
    """Command-line tool: declare a topic exchange, then relay stdin lines to it."""
    logging.basicConfig(level=logging.DEBUG)
    options = parser.parse_args()
    connection = BrokerConnection(
        hostname=options.hostname,
        virtual_host=options.vhost,
        userid=options.user,
        password=options.password,
    )
    # Declaration is done manually below, so suppress auto_declare.
    publisher = Publisher(auto_declare=False,
                          connection=connection,
                          exchange=options.exchange,
                          routing_key=options.key)
    logging.info("Declaring exchange: %s" % options.exchange)
    publisher.backend.exchange_declare(exchange=options.exchange,
                                       type="topic",
                                       durable=False,
                                       auto_delete=False)
    while True:
        line = sys.stdin.readline()
        if not line:
            # readline() returns "" only at EOF.
            break
        stripped = line.strip()
        logging.debug("Sending message '%s'" % stripped)
        publisher.send(stripped)
    publisher.close()
class GenericPublisher(object):
    """Publishes pulse messages, wrapped in an envelope, to a topic exchange."""

    def __init__(self, config, exchange=None, connect=True):
        self.config = config
        self.exchange = exchange
        self.connection = None
        if connect:
            self.connect()

    # Connect to the message broker
    def connect(self):
        if self.connection:
            return
        self.connection = BrokerConnection(hostname=self.config.host,
                                           port=self.config.port,
                                           userid=self.config.user,
                                           password=self.config.password,
                                           virtual_host=self.config.vhost)

    # Disconnect from the message broker
    def disconnect(self):
        if self.connection:
            self.connection.close()
            self.connection = None

    # Used to publish a pulse message to the proper exchange
    def publish(self, message):
        # Validate inputs before touching the broker.
        if not self.exchange:
            raise InvalidExchange(self.exchange)
        if not message:
            raise MalformedMessage(message)

        # Have the message prepare and validate itself
        message._prepare()

        # Lazily (re)connect if we are not connected yet.
        if not self.connection:
            self.connect()

        # Set up our broker publisher
        self.publisher = Publisher(connection=self.connection,
                                   exchange=self.exchange,
                                   exchange_type="topic",
                                   routing_key=message.routing_key)

        # The message is a simple envelope: a payload plus "_meta" metadata.
        meta = message.metadata.copy()
        meta.update({
            'exchange': self.exchange,
            'routing_key': message.routing_key,
            'serializer': self.config.serializer,
            'sent': time_to_string(datetime.now(timezone(self.config.broker_timezone))),
        })
        final_data = {'payload': message.data, '_meta': meta}

        # Send the message, then close the publishing channel.
        self.publisher.send(final_data, serializer=self.config.serializer)
        self.publisher.close()
from carrot.connection import BrokerConnection
from carrot.messaging import Publisher
import sys

# Connect to the local broker and publish a single routing job on the
# "sorting_room" exchange; the PO box number is taken from the command line.
connection = BrokerConnection(hostname="localhost",
                              port=5672,
                              userid="guest",
                              password="******",
                              virtual_host="/")
publisher = Publisher(connection=connection,
                      exchange="sorting_room",
                      routing_key="jason")
publisher.send({"po_box": sys.argv[1]})
publisher.close()
def send(exchange, routing_key, msg):
    """Publish *msg* to *exchange* under *routing_key*.

    NOTE(review): relies on a module-level ``conn`` — presumably a broker
    connection created elsewhere in the file; confirm against the full module.
    """
    producer = Publisher(connection=conn,
                         exchange=exchange,
                         routing_key=routing_key)
    producer.send(msg)
    producer.close()
class EventDispatcher(object):
    """Send events as messages.

    :param connection: Carrot connection.

    :keyword hostname: Hostname to identify ourselves as, by default uses
        the hostname returned by :func:`socket.gethostname`.

    :keyword enabled: Set to ``False`` to not actually publish any events,
        making :meth:`send` a noop operation.

    You need to :meth:`close` this after use.

    """

    def __init__(self, connection, hostname=None, enabled=True, app=None):
        self.app = app_or_default(app)
        self.connection = connection
        self.hostname = hostname or socket.gethostname()
        self.enabled = enabled
        # Serializes publisher access across threads.
        self._lock = threading.Lock()
        self.publisher = None
        # Buffer of (event, exc) pairs whose publish failed; drained by flush().
        self._outbound_buffer = deque()
        if self.enabled:
            self.enable()

    def enable(self):
        # Builds the publisher from the app's CELERY_EVENT_* configuration.
        conf = self.app.conf
        self.enabled = True
        self.publisher = Publisher(self.connection,
                                   exchange=conf.CELERY_EVENT_EXCHANGE,
                                   exchange_type=conf.CELERY_EVENT_EXCHANGE_TYPE,
                                   routing_key=conf.CELERY_EVENT_ROUTING_KEY,
                                   serializer=conf.CELERY_EVENT_SERIALIZER)

    def disable(self):
        # Stop publishing and drop the publisher; send() becomes a no-op.
        self.enabled = False
        if self.publisher is not None:
            self.publisher.close()
            self.publisher = None

    def send(self, type, **fields):
        """Send event.

        :param type: Kind of event.
        :keyword \*\*fields: Event arguments.

        """
        if not self.enabled:
            return
        self._lock.acquire()
        event = Event(type, hostname=self.hostname, **fields)
        try:
            try:
                self.publisher.send(event)
            except Exception, exc:
                # Publish failed: keep the event so flush() can retry it later.
                self._outbound_buffer.append((event, exc))
        finally:
            self._lock.release()

    def flush(self):
        # Retry every buffered event; exceptions here propagate to the caller.
        while self._outbound_buffer:
            event, _ = self._outbound_buffer.popleft()
            self.publisher.send(event)

    def close(self):
        """Close the event dispatcher."""
        # Best-effort: release the lock only if currently held, then close the
        # publisher if one exists (short-circuit `and` guards both calls).
        self._lock.locked() and self._lock.release()
        self.publisher and self.publisher.close()
port = 1000 + easyip.EASYIP_PORT buf = 1024 addr = (host, port) # Create socket and bind to address UDPSock = socket(AF_INET, SOCK_DGRAM) UDPSock.bind(addr) print "waiting" # Receive messages while 1: data, addr = UDPSock.recvfrom(buf) if not data: print "Client has exited!" break else: packet = easyip.Packet(data) print "\nReceived message '", packet, "' with %s" % (packet.payload, ) response = easyip.Factory.response(packet) UDPSock.sendto(response.pack(), addr) print "Responded to %s, %s" % addr print "Putting incomming on queue" publisher.send({'address': addr, 'payload': packet.payload}) print "Closing" # Close socket UDPSock.close() publisher.close()