Example #1
from kombu import BrokerConnection, Exchange, Queue, Consumer


class MqServer(object):
    """
    exchange='E_X7_W2S', queue='Q_X7_W2S', routing_key='RK_X7_W2S'
    """

    def __init__(self, callback, kwargs):
        self.callback = callback
        if kwargs:
            self.kwargs = kwargs
        else:
            self.kwargs = MqDict

    def connect(self, hostname="localhost", userid="guest", password="******", virtual_host="/"):
        self.conn = BrokerConnection(hostname, userid, password, virtual_host)
        # define Web2Server exchange
        exchange = Exchange(self.kwargs["X7_E"], type="direct")
        self.queue = Queue(self.kwargs["X7_Q"], exchange, routing_key=self.kwargs["X7_RK"])
        channel = self.conn.channel()

        consumer = Consumer(channel, self.queue, callbacks=[self.callback])
        consumer.consume()

    def run(self, once=False):
        if once:
            self.conn.drain_events()
        else:
            while True:
                self.conn.drain_events()

    def get(self):
        message = self.queue.get(block=True)
        message.ack()
        return message
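
A minimal usage sketch for the class above (hypothetical values; the callback signature follows kombu's Consumer convention, and the dict keys match the self.kwargs lookups in connect()):

# Hypothetical usage of MqServer; names and credentials are placeholders.
def on_message(body, message):
    print("received:", body)
    message.ack()

settings = {"X7_E": "E_X7_W2S", "X7_Q": "Q_X7_W2S", "X7_RK": "RK_X7_W2S"}
server = MqServer(on_message, settings)
server.connect(hostname="localhost", userid="guest", password="guest")
server.run(once=True)   # drain a single event; run() loops forever otherwise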
Example #2
 def _consume(cnx: BrokerConnection, timesup: int) -> None:
     try:
         cnx.drain_events(timeout=timesup)
     except kombu_exceptions.TimeoutError:
         pass
     except (kombu_exceptions.ChannelLimitExceeded,
             kombu_exceptions.ConnectionLimitExceeded,
             kombu_exceptions.OperationalError,
             kombu_exceptions.NotBoundError,
             kombu_exceptions.MessageStateError,
             kombu_exceptions.LimitExceeded) as err:
         raise ChannelFailureException("Error connecting to RabbitMQ, see inner exception for details", err)
Example #3
 def _consume(cnx: BrokerConnection, timesup: int) -> None:
     try:
         cnx.drain_events(timeout=timesup)
     except kombu_exceptions.TimeoutError:
         pass
     except (kombu_exceptions.ChannelLimitExceeded,
             kombu_exceptions.ConnectionLimitExceeded,
             kombu_exceptions.OperationalError,
             kombu_exceptions.NotBoundError,
             kombu_exceptions.MessageStateError,
             kombu_exceptions.LimitExceeded) as err:
         raise ChannelFailureException(
             "Error connecting to RabbitMQ, see inner exception for details",
             err)
Example #4
 def _consume(cnx: BrokerConnection, timesup: int) -> None:
     try:
         cnx.drain_events(timeout=timesup)
     except kombu_exceptions.TimeoutError:
         self._logger.debug("Time out reading from queue %s", self._queue_name)
         cnx.heartbeat_check()
     except (kombu_exceptions.ChannelLimitExceeded,
             kombu_exceptions.ConnectionLimitExceeded,
             kombu_exceptions.OperationalError,
             kombu_exceptions.NotBoundError,
             kombu_exceptions.MessageStateError,
             kombu_exceptions.LimitExceeded) as err:
         raise ChannelFailureException("Error connecting to RabbitMQ, see inner exception for details", err)
     except (OSError, IOError, ConnectionError) as socket_err:
         self._reset_connection()
         raise ChannelFailureException("Error connecting to RabbitMQ, see inner exception for details", socket_err)
Example #5
def queue_drain():
    filename = "meta"
    fptr = open(filename, "r")
    amqpurl = fptr.readline().strip()
    exchange_name = fptr.readline().strip()

    exchange = Exchange(exchange_name, type="direct")
    D_queue = Queue(exchange_name, exchange, routing_key=exchange_name, exclusive=False)
    connection = BrokerConnection(amqpurl)
    channel = connection.channel()
    queue = D_queue(channel)
    queue.declare()
    consumer = Consumer(channel, queue, callbacks=[work])
    consumer.qos(prefetch_size=0, prefetch_count=1, apply_global=False)
    consumer.consume(no_ack=False)
    print "about to drain"
    for i in range(0, 30):
        try:
            connection.drain_events(timeout=1)
        except socket.timeout:
            pass
Example #6
    def run(self):
        print "exchange = %s, queue = %s, routing_key = %s, amqpurl = %s" % (self.testname, self.testname, self.testname, self.amqpurl)
        exchange = Exchange(self.testname, type="direct")
        D_queue = Queue(self.testname, exchange, routing_key=self.testname, exclusive=False)
        connection = BrokerConnection(self.amqpurl)

        #u = self.amqpurl.replace('amqp', 'http')
        #parts = urlparse.urlparse(u)

        #connection = Connection(host=parts.hostname, userid=parts.username, password=parts.password, port=parts.port, heartbeat=30)

        channel = connection.channel()
        queue = D_queue(channel)
        queue.declare()
        consumer = Consumer(channel, queue, callbacks=[self.work])
        consumer.qos(prefetch_size=0, prefetch_count=1, apply_global=False)
        self.done = False
        consumer.consume(no_ack=False)
        print "about to drain"
        while  not self.done:
            connection.drain_events()
        self.real_work()
Example #7
class Connection:
    connection = None

    def __init__(self, url):
        self.url = url
        self.__connection = None
        self.__running = True
        self.channel = None
        self.sleep_time = 10
        self.reconnect(url)

    @staticmethod
    def get_instance():
        return Connection.connection

    def __connect(self):
        self.__connection = BrokerConnection(self.url)
        self.channel = self.get_channel()

        self.rpc_factory = rpc.RpcFactory(self.channel)
        self.publisher_factory = publisher.PublisherFactory(self.channel)
        self.consumer_factory = consumer.ConsumerFactory(self.channel)

        self.__running = True
        Connection.connection = self

    def get_broker_connection(self):
        if self.__connection is None:
            self.reconnect(self.url)

        return self.__connection

    def get_channel(self):
        if self.channel is None:
            self.channel = self.get_new_channel()
        return self.channel

    def get_new_channel(self):
        if self.__connection is None:
            self.reconnect(self.url)
        return self.__connection.channel()

    def get_rpc_factory(self):
        return self.rpc_factory

    def reconnect(self, url=None):
        cc.acquire()
        if self.__connection is not None:
            self.release()

        if url is not None:
            self.url = url

        logger.debug("reconnect connection")
        attempt = 0
        while True:
            try:
                self.__connect()
                cc.release()
                return
            except Exception as e:
                logging.exception(e)

            logging.debug("retry again in %s s" % self.sleep_time)
            time.sleep(self.sleep_time)
        cc.release()

    def drain_events(self):
        self.__connection.drain_events()

    def release(self):
        Connection.connection = None
        self.__running = False
        self.__connection.release()
        self.__connection.close()
        self.channel = None
        self.__connection = None
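
A rough usage sketch for this connection wrapper (the broker URL is a placeholder; the rpc/publisher/consumer factories come from the surrounding project):

# Hypothetical usage; the broker URL is a placeholder.
conn = Connection("amqp://guest:guest@localhost:5672//")
channel = conn.get_channel()               # lazily created and cached
assert Connection.get_instance() is conn   # singleton set inside __connect()
try:
    conn.drain_events()                    # blocks until an event arrives
finally:
    conn.release()                         # closes the connection and clears the singleton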
Example #8
from pprint import pformat

from kombu import BrokerConnection, Consumer


def pretty(obj):
    return pformat(obj, indent=4)


#: This is the callback applied when a message is received.
def handle_message(body, message):
    print("Received message: %r" % (body, ))
    print("  properties:\n%s" % (pretty(message.properties), ))
    print("  delivery_info:\n%s" % (pretty(message.delivery_info), ))
    message.ack()

#: Create a connection and a channel.
#: If hostname, userid, password and virtual_host are not specified
#: the values below are the default, but listed here so it can
#: be easily changed.
connection = BrokerConnection(hostname="localhost",
                              userid="guest",
                              password="******",
                              virtual_host="/")
channel = connection.channel()

#: Create consumer using our callback and queue.
#: Second argument can also be a list to consume from
#: any number of queues.
consumer = Consumer(channel, queue, callbacks=[handle_message])
consumer.consume()

#: This waits for a single event.  Note that this event may not be
#: a message, or a message destined for this consumer's channel;
#: it can be any event received on the connection.
connection.drain_events()
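
drain_events() also accepts a timeout in seconds and raises socket.timeout when nothing arrives in that window. A common pattern (sketched below, reusing the connection object from the example above) is to poll in a bounded loop and ignore the timeouts:

import socket

# Sketch: poll for roughly 30 seconds, one second at a time.
for _ in range(30):
    try:
        connection.drain_events(timeout=1)
    except socket.timeout:
        pass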
Example #9
    durable=True,
    auto_delete=False,
    channel=channel,
    routing_key='filr',
)
queue.declare()

def fetch(b, m):
    print(b, m)

consumer = Consumer(
    channel=connection.channel(),
    queues=queue,
    auto_declare=False,
    callbacks=[fetch]
)
consumer.consume(no_ack=False)

while True:
    connection.drain_events()
    pass

#execfile('.private-settings')

#sdb = boto.connect_sdb(key_id, sec_key)
#domain = sdb.create_domain('android')
#item = domain.new_item('kral_step1')

#for key,value in kral_step.items():
#    item[key] = value
Example #10
 def _consume(cnx: BrokerConnection, timesup: int) -> None:
     try:
         cnx.drain_events(timeout=timesup)
     except kombu_exceptions.TimeoutError as te:
         pass
Example #11
amqpurl = fptr.readline().strip()
exchange_name = fptr.readline().strip()
try:
    exchange = Exchange(exchange_name, type="direct")
    D_queue = Queue(exchange_name, exchange, routing_key=exchange_name, exclusive=False)
    connection = BrokerConnection(amqpurl)
    channel = connection.channel()
    queue = D_queue(channel)
    queue.declare()
    consumer = Consumer(channel, queue, callbacks=[work])
    consumer.qos(prefetch_size=0, prefetch_count=1, apply_global=False)
    consumer.consume(no_ack=False)
    print "about to drain"
    for i in range(0, 25):
        try:
            connection.drain_events(timeout=1)
        except:
            pass
except Exception as ex:
    print(ex)
s3url = os.environ['EC2_URL']
s3id = os.environ['EC2_ACCESS_KEY']
s3pw = os.environ['EC2_SECRET_KEY']

parts = urlparse.urlparse(s3url)
host = parts.hostname
port = parts.port
is_secure = parts.scheme == "https"
path = parts.path

ec2conn = EC2Connection(s3id, s3pw, host=host, port=port, debug=0, is_secure=is_secure)
Example #12
class Proxy(object):
	"""
	This Proxy class is used to handle the communication with the rpc server

	:keyword server_id: Default id of the Server (can be declared later see :func:`use_server`)
	:keyword amqp_host: The host of where the AMQP Broker is running.
	:keyword amqp_user: The username for the AMQP Broker.
	:keyword amqp_password: The password for the AMQP Broker.
	:keyword amqp_vhost: The virtual host of the AMQP Broker.
	:keyword amqp_port: The port of the AMQP Broker.
	:keyword ssl: Use SSL connection for the AMQP Broker.
	:keyword timeout: Default timeout for calls in seconds


	"""
	timeout = 0
	response = None
	
	def __init__(self,
				server_id = None,
				amqp_host='localhost', 
				amqp_user ='******',
				amqp_password='******',
				amqp_vhost='/',
				amqp_port=5672,
				ssl=False,
				timeout=0):
		
		
		self.logger = logging.getLogger('callme.proxy')
		self.timeout = 0
		self.is_received = False
		self.connection = BrokerConnection(hostname=amqp_host,
							  userid=amqp_user,
							  password=amqp_password,
							  virtual_host=amqp_vhost,
							  port=amqp_port,
							  ssl=ssl)
		self.channel = self.connection.channel()
		self.timeout = timeout
		my_uuid = gen_unique_id()
		self.reply_id = "client_"+amqp_user+"_ex_" + my_uuid
		self.logger.debug("Queue ID: %s" %self.reply_id)
		src_exchange = Exchange(self.reply_id, "direct", durable=False,
								auto_delete=True)
		src_queue = Queue("client_"+amqp_user+"_queue_"+my_uuid, exchange=src_exchange, 
						auto_delete=True,
						durable=False)
		
		# must declare in advance so the reply message isn't
		# published before the queue exists.
		src_queue(self.channel).declare()
		
		
		consumer = Consumer(channel=self.channel, queues=src_queue, callbacks=[self._on_response])
		consumer.consume()		
		
	def _on_response(self, body, message):
		"""
		This method is called automatically when a response arrives and
		checks whether it is the message we are waiting for - the one
		carrying the result.

		:param body: the body of the amqp message, already unpickled by kombu
		:param message: the plain amqp kombu.message with additional information
		"""
		
		if self.corr_id == message.properties['correlation_id'] and \
			isinstance(body, RpcResponse):
			self.response = body
			self.is_received = True
			message.ack()
		
	def use_server(self, server_id=None, timeout=None):
		"""Use the specified server and set an optional timeout for the method
		call
		
		Typical use:
			
			>>> my_proxy.use_server('fooserver').a_remote_func()

		:keyword server_id: The server id where the call will be made.
		:keyword timeout: set or overrides the call timeout in seconds
		:rtype: Returns `self` so further calls can be chained

		"""

		if server_id is not None:
			self.server_id = server_id
		if timeout is not None:
			self.timeout = timeout
		return self
	
	
	def __request(self, methodname, params):
		"""
		The remote-method-call execution function

		:param methodname: name of the method that should be executed
		:param params: parameter for the remote-method
		:type methodname: string
		:type params: list of parameters
		:rtype: result of the method
		"""
		self.logger.debug('Request: ' + repr(methodname) + '; Params: '+ repr(params))
		
		target_exchange = Exchange("server_"+self.server_id+"_ex", "direct", durable=False,
								auto_delete=True)
		self.producer = Producer(channel=self.channel, exchange=target_exchange,
								auto_declare=False)
		
		rpc_req = RpcRequest(methodname, params)
		self.corr_id = str(uuid.uuid4())
		self.logger.debug('RpcRequest build')
		self.logger.debug('corr_id: %s' % self.corr_id)
		self.producer.publish(rpc_req, serializer="pickle",
							reply_to=self.reply_id,
							correlation_id=self.corr_id)
		self.logger.debug('Producer published')
		
		self._wait_for_result()
		
		self.logger.debug('Result: %s' % repr(self.response.result))
		res = self.response.result
		self.response.result = None
		self.is_received = False

		if self.response.exception_raised:
			raise res
		else:
			return res
		
	def _wait_for_result(self):
		"""
		Waits for the result from the server, checking every second whether a
		timeout has occurred. If a timeout occurs, a `socket.timeout` exception
		is raised.
		"""
		seconds_elapsed = 0
		while not self.is_received:
			try:
				self.logger.debug('drain events... timeout=%d, counter=%d' 
								% (self.timeout, seconds_elapsed))
				self.connection.drain_events(timeout=1)
			except socket.timeout:
				if self.timeout > 0:
					seconds_elapsed = seconds_elapsed + 1
					if seconds_elapsed > self.timeout:
						raise socket.timeout()

	def __getattr__(self, name):
		"""
		This method is invoked when a method that doesn't exist on Proxy is called.
		It is used for RPC to resolve the function that should be called on the Server.
		"""
		# magic method dispatcher
		self.logger.debug('Recursion: ' + name)
		return _Method(self.__request, name)
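
A short, hypothetical usage sketch for the Proxy above (the server id 'fooserver', the remote function add(), and the credentials are placeholders; the function must be registered on the server side):

# Hypothetical usage; credentials and the remote function are placeholders.
proxy = Proxy(amqp_host='localhost', amqp_user='guest', amqp_password='guest')
result = proxy.use_server('fooserver', timeout=10).add(1, 2)
print(result)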
Example #13
 def _consume(cnx: BrokerConnection, timesup: int) -> None:
     try:
         cnx.drain_events(timeout=timesup)
     except kombu_exceptions.TimeoutError:
         pass
Example #14
class Server(object):
	"""
	This Server class is used to provide an RPC server

	:keyword server_id: Id of the server
	:keyword amqp_host: The host of where the AMQP Broker is running.
	:keyword amqp_user: The username for the AMQP Broker.
	:keyword amqp_password: The password for the AMQP Broker.
	:keyword amqp_vhost: The virtual host of the AMQP Broker.
	:keyword amqp_port: The port of the AMQP Broker.
	:keyword ssl: Use SSL connection for the AMQP Broker.
	:keyword threaded: Use multithreading. If set to True, RPC calls are
		processed in parallel (one thread per call), which dramatically
		improves performance.


	"""
	
	def __init__(self, 
				server_id,
				amqp_host='localhost', 
				amqp_user ='******',
				amqp_password='******',
				amqp_vhost='/',
				amqp_port=5672,
				ssl=False,
				threaded=False):
		self.logger = logging.getLogger('callme.server')
		self.logger.debug('Server ID: %s' % server_id)
		self.server_id = server_id
		self.threaded = threaded
		self.do_run = True
		self.is_stopped = True
		self.func_dict={}
		self.result_queue = queue.Queue()
		target_exchange = Exchange("server_"+server_id+"_ex", "direct", durable=False,
								auto_delete=True)	
		self.target_queue = Queue("server_"+server_id+"_queue", exchange=target_exchange, 
							auto_delete=True, durable=False)
		
		
		
		self.connection = BrokerConnection(hostname=amqp_host,
                              userid=amqp_user,
                              password=amqp_password,
                              virtual_host=amqp_vhost,
                              port=amqp_port,
                              ssl=ssl)
		try:
			self.connection.connect()
		except IOError:
			self.logger.critical("Connection Error: the AMQP user probably lacks the required permissions")
			raise ConnectionError("Connection Error: the AMQP user probably lacks the required permissions")
		
		self.channel = self.connection.channel()
		
		self.publish_connection = BrokerConnection(hostname=amqp_host,
                              userid=amqp_user,
                              password=amqp_password,
                              virtual_host=amqp_vhost,
                              port=amqp_port,
                              ssl=ssl)
		self.publish_channel = self.publish_connection.channel()
		
		# consume
		self.consumer = Consumer(self.channel, self.target_queue)
		if self.threaded:
			self.consumer.register_callback(self._on_request_threaded)
		else:
			self.consumer.register_callback(self._on_request)
		self.consumer.consume()
		
		self.logger.debug('Init done')
		
	def _on_request(self, body, message):
		"""
		This method is automatically called when a request is incoming. It
		processes the incoming rpc calls in a serial manner (no multithreading).

		:param body: the body of the amqp message, already unpickled by kombu
		:param message: the plain amqp kombu.message with additional information
		"""
		self.logger.debug('Got Request')
		rpc_req = body
		
		if not isinstance(rpc_req, RpcRequest):
			self.logger.debug('Not an RpcRequest Instance')
			return
		
		self.logger.debug('Call func on Server %s' %self.server_id)
		try:
			self.logger.debug('corr_id: %s' % message.properties['correlation_id'])
			self.logger.debug('Call func with args %s' % repr(rpc_req.func_args))
			
			result = self.func_dict[rpc_req.func_name](*rpc_req.func_args)
			
			self.logger.debug('Result: %s' % repr(result))
			self.logger.debug('Build response')
			rpc_resp = RpcResponse(result)
		except Exception as e:
			self.logger.debug('exception happened')
			rpc_resp = RpcResponse(e, exception_raised=True)
			
		message.ack()
		
		self.logger.debug('Publish response')
		# producer 
		src_exchange = Exchange(message.properties['reply_to'], "direct", durable=False,
							auto_delete=True)
		self.producer = Producer(self.publish_channel, src_exchange, auto_declare=False)
		
		self.producer.publish(rpc_resp, serializer="pickle",
							correlation_id=message.properties['correlation_id'])
		
		self.logger.debug('acknowledge')
		


	def _on_request_threaded(self, body, message):
		"""
		This method is automatically called when a request is incoming and
		`threaded` is set to `True`. It processes the incoming rpc calls in
		a parallel manner (one thread for each request). A separate Publisher
		thread is used to send back the results.

		:param body: the body of the amqp message, already unpickled by kombu
		:param message: the plain amqp kombu.message with additional information
		"""
		self.logger.debug('Got Request')
		rpc_req = body
		
		if not isinstance(rpc_req, RpcRequest):
			self.logger.debug('Not an RpcRequest Instance')
			return
		
		message.ack()
		self.logger.debug('acknowledge')
		
		def exec_func(body, message, result_queue):
			self.logger.debug('Call func on Server %s' %self.server_id)
			try:
				self.logger.debug('corr_id: %s' % message.properties['correlation_id'])
				self.logger.debug('Call func with args %s' % repr(rpc_req.func_args))
				
				result = self.func_dict[rpc_req.func_name](*rpc_req.func_args)
				
				self.logger.debug('Result: %s' % repr(result))
				self.logger.debug('Build response')
				rpc_resp = RpcResponse(result)
			except Exception as e:
				self.logger.debug('exception happened')
				rpc_resp = RpcResponse(e, exception_raised=True)
				
			result_queue.put(ResultSet(rpc_resp, 
									message.properties['correlation_id'],
									message.properties['reply_to']))
				
		p = Thread(target=exec_func, 
				name=message.properties['correlation_id'],
				args=(body, message, self.result_queue))
		p.start()
		
	
	def register_function(self, func, name):
		"""
		Registers a function as an rpc function so that it is accessible from
		the proxy.
		
		:param func: The function we want to provide as rpc method
		:param name: The name with which the function is visible to the clients
		"""
		self.func_dict[name] = func
	
	def start(self):
		"""
		Starts the server. If `threaded` is `True` also starts the Publisher 
		thread.
		"""
		self.is_stopped = False
		if self.threaded:
			self.pub_thread = Publisher(self.result_queue, self.publish_channel)
			self.pub_thread.start()
			
		while self.do_run:
			try:
				self.logger.debug("drain_events: %s" % repr(self.do_run))
				self.connection.drain_events(timeout=1)
			except socket.timeout:
				self.logger.debug("do_run: %s" % repr(self.do_run))
			except:
				self.logger.debug("interrupt exception" )
				if self.threaded:
					self.pub_thread.stop()
				self.consumer.cancel()
				self.connection.close()
				self.publish_connection.close()
				self.is_stopped = True
				return
			
		if self.threaded:
			self.pub_thread.stop()
		self.logger.debug("Normal Exit" )
		self.consumer.cancel()
		self.connection.close()
		self.publish_connection.close()
		self.logger.debug("All closed" )
		self.is_stopped = True
		
	def stop(self):
		"""
		Stops the server.
		"""
		self.logger.debug('Stop server')
		self.do_run = False
		while not self.is_stopped:
			self.logger.debug('wait for stop')
			sleep(0.1)
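
For completeness, a hypothetical usage sketch for the Server above (the server id, function name, and credentials are placeholders; stop() would normally be called from another thread or a signal handler):

# Hypothetical usage; register a plain function and serve until stopped.
def add(a, b):
    return a + b

server = Server('fooserver', amqp_host='localhost',
                amqp_user='guest', amqp_password='guest')
server.register_function(add, 'add')
server.start()   # blocks; drains events until stop() is called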