def handle(self, *args, **options):
    # FIXME: This only works when there's a single transaction
    # <CdtTrfTxInf> in the pacs.008.
    filename = options['filename'][0]
    xml = open(filename, 'r').read()
    root = pacs008.create_from_string(xml)
    payment = {
        'source_iban': root.debtor_iban,
        'destination_iban': root.creditor_iban,
        'source_bic': root.debtor_bic,
        'destination_bic': root.creditor_bic,
        'amount': root.amount,
        'currency': str(root.currency),  # FIXME: django-xml is buggy here
        'payload': xml,
    }

    # Setup the queue where the router is waiting
    qname = "{}_{}".format(root.creditor_bic, "recv")
    queue = RedisSMQ(host="127.0.0.1", qname=qname)
    message_id = queue.sendMessage().message(yaml.dump(payment)).execute()
    self.success("Payment payload delivered: {}".format(message_id))
def handle(self, *args, **options):
    # Setup the queue where the router is waiting
    qname = "{}_{}".format(options['source'][0], "recv")
    queue = RedisSMQ(host="127.0.0.1", qname=qname)

    # Read the payload from file
    with open(options['payload_file'][0]) as fh:
        payload = fh.read()

    # Send the payment packet
    message_id = queue.sendMessage().message(payload).execute()
    self.success("Payment payload delivered: {}".format(message_id))
def consume(self) -> None:
    host, port = self.consumers[0].actor.service.address.split(':')
    while True:
        if self.stop:
            break

        try:
            queue = RedisSMQ(host=host, port=port, qname=self.consumers[0].topic)

            if any(consumer.enable_topic_creation for consumer in self.consumers):
                try:
                    queue.createQueue(delay=0).vt(INFINITE_QUEUE_VISIBILITY).execute()
                except QueueAlreadyExists:
                    pass

            try:
                msg = queue.receiveMessage().exceptions(False).execute()
                if msg:
                    self.consume_message(
                        key=None,
                        value=msg['message'],
                        headers={}
                    )
            except AttributeError:
                pass

            queue.quit()
        except RedisConnectionError:
            logging.warning('Couldn\'t establish a connection to Redis instance at %s:%s', host, port)

        time.sleep(1)
def handle(self, *args, **options):
    bic = options['destination'][0]

    # Set up the queue where we wait for the router to deliver a
    # payment packet.
    qname = "{}_{}".format(bic, "send")
    queue = RedisSMQ(host="127.0.0.1", qname=qname)

    # Receive a payment packet
    try:
        msg = queue.receiveMessage().execute()

        # Process payload from YAML
        packet = yaml.safe_load(msg['message'])
        self.success("Payment packet received {}".format(
            self.format_payment(packet)))
        queue.deleteMessage(id=msg['id']).execute()
    except NoMessageInQueue:
        self.notice("No payment packets for {}".format(bic))
class RedisUtils():

    def __init__(self, host):
        self.conn = RedisSMQ(host=host, qname="accounts")

    def redis_enqueue(self, msg):
        self.conn.sendMessage(delay=0).message(msg).execute()

    def redis_dequeue(self):
        return self.conn.popMessage().execute()['message']

    def redis_queue_length(self):
        return self.conn.getQueueAttributes().execute()['msgs']

    def redis_quit(self):
        return self.conn.quit()

    def redis_clear_queue(self):
        self.conn.deleteQueue().exceptions(False).execute()
        self.conn.createQueue(delay=0).vt(20).execute()
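
# Usage sketch for the RedisUtils wrapper above. The host value and the message
# text are illustrative; it assumes the PyRSMQ package is installed and a Redis
# server is reachable on 127.0.0.1:6379.
utils = RedisUtils(host="127.0.0.1")
utils.redis_clear_queue()                # drop and recreate the "accounts" queue
utils.redis_enqueue("open account 42")   # enqueue one message
print(utils.redis_queue_length())        # queue attribute 'msgs' -> 1
print(utils.redis_dequeue())             # -> "open account 42"
utils.redis_quit()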
def _create_topic(address: str, topic: str, ssl: bool = False):
    host, port = address.split(':')

    queue = RedisSMQ(host=host, port=port, qname=topic)
    try:
        queue.createQueue(delay=0).vt(INFINITE_QUEUE_VISIBILITY).execute()
    except QueueAlreadyExists:
        pass

    logging.info('Queue %s created', topic)
    queue.quit()
def _produce(self, key: str, value: str, headers: dict, payload: AsyncProducerPayload) -> None:
    host, port = self.actor.service.address.split(':')
    try:
        queue = RedisSMQ(host=host, port=port, qname=self.topic)

        if payload.enable_topic_creation:
            try:
                queue.createQueue(delay=0).vt(INFINITE_QUEUE_VISIBILITY).execute()
            except QueueAlreadyExists:
                pass

        try:
            queue.sendMessage(delay=0).message(value).execute()
        except AttributeError:
            pass

        queue.quit()
    except RedisConnectionError:
        logging.warning('Couldn\'t establish a connection to Redis instance at %s:%s', host, port)
        raise
class MsgSender:

    def __init__(self, host, port='6379', qname='message_sender'):
        self.queue = RedisSMQ(host=host, port=port, qname=qname)
        self.msg = []
        try:
            # Delete the queue if it already exists
            self.queue.deleteQueue().exceptions(False).execute()
        except Exception as e:
            print(e)
        try:
            # Create the queue
            self.queue.createQueue(delay=0).vt(0).maxsize(-1).execute()
        except Exception as e:
            print(e)

    def send_result(self, result):
        message_id = self.queue.sendMessage(delay=0).message(
            str(result)).execute()
        self.msg.append(message_id)
        if len(self.msg) > 20:
            rt = self.queue.deleteMessage(id=self.msg[0]).execute()
            if rt:
                print("RedisSMQ send_result block")
            del self.msg[0]
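
# Short usage sketch for MsgSender, assuming a Redis server on 127.0.0.1:6379.
# send_result() tracks the 20 most recent message ids and deletes the oldest
# queued message once that window overflows.
sender = MsgSender(host="127.0.0.1")
for frame_id in range(25):
    sender.send_result({"frame": frame_id})   # results beyond the last 20 are pruned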
""" Script to convert openpose output into bbox """ import json import numpy as np from rsmq import RedisSMQ import base64 import json QUEUE3 = "smplOrigQue" queue3 = RedisSMQ(host="127.0.0.1", qname=QUEUE3) msg3 = [] kp_human = {} def write_human_pose(data): with open('/home/feng/human_zj/rtviewer/public/resource/model/human.json', 'w') as json_file: json_file.write(json.dumps(data)) #str_3 = json_file.read() #print('human pose writed!') def read_json(json_path): with open(json_path) as f: data = json.load(f) kps = [] for people in data['people']: kp = np.array(people['pose_keypoints_2d']).reshape(-1, 3) kps.append(kp)
dir_path = os.path.dirname(os.path.realpath(__file__))
# sys.path.append('/home/feng/openpose_py27/build/python');

import threading
import glob
import multiprocessing
from rsmq import RedisSMQ
import base64

smpl_model_used = load_model(
    '/home/feng/human_zj/SMPL_python_v.1.0.0/smpl/models/basicmodel_m_lbs_10_207_0_v1.0.0.pkl'
)
outmesh_path = '/media/ramdisk/result.obj'

QUEUE1 = "smplOrigQue"
QUEUE2 = "smplObjQue"
queue1 = RedisSMQ(host="127.0.0.1", qname=QUEUE1)
queue2 = RedisSMQ(host="127.0.0.1", qname=QUEUE2)

# Recreate both queues, ignoring errors if they do or do not already exist
try:
    queue1.deleteQueue().exceptions(False).execute()
except Exception as e:
    print(e)
try:
    queue1.createQueue(delay=0).vt(0).maxsize(-1).execute()
except Exception as e:
    print(e)
try:
    queue2.deleteQueue().exceptions(False).execute()
except Exception as e:
    print(e)
import ui
from random import random
from console import hud_alert
from rsmq import RedisSMQ

queue = RedisSMQ(host="192.168.75.243", qname="home-office-lights")


def slider_action(sender):
    # Get the root view:
    v = sender.superview
    # Get the sliders:
    r = v['slider1'].value
    g = v['slider2'].value
    b = v['slider3'].value
    # Create the new color from the slider values:
    v['view1'].background_color = (r, g, b)
    v['label1'].text = '#%.02X%.02X%.02X' % (int(r * 255), int(g * 255), int(b * 255))


def set_action(sender):
    r = int(sender.superview['slider1'].value * 255)
    g = int(sender.superview['slider2'].value * 255)
    b = int(sender.superview['slider3'].value * 255)
    queue.sendMessage().message({
        "type": "solid-colour",
        "r": r,
        "g": g,
        "b": b
    }).execute()
import json
import os
import time
import sys

from rsmq import RedisSMQ
from rsmq.cmd import NoMessageInQueue

queue = RedisSMQ(host=os.getenv('REDIS'), qname="home-office-lights")


def handle_msg(strip_manager, msg):
    print("Handling {} message".format(msg['type']))
    if msg['type'] == 'solid-colour':
        strip_manager.solid_color(msg['r'], msg['g'], msg['b'])
    if msg['type'] == 'alert':
        strip_manager.alert(msg['r'], msg['g'], msg['b'])
    if msg['type'] == 'off':
        strip_manager.clear()


def server():
    # This import is scoped so that rpi_ws281x does not need to be installed to send messages
    from strip_manager import StripManager

    strip_manager = StripManager.default()

    queue.deleteQueue().exceptions(False).execute()
    queue.createQueue().execute()

    previous_msg = None
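    # The snippet above is cut off after "previous_msg = None"; the original
    # polling loop is not shown. A hypothetical continuation (an assumption,
    # not the original code), based on the json/NoMessageInQueue imports above:
    while True:
        try:
            msg = queue.receiveMessage().execute()
            payload = json.loads(msg['message'])      # assumes the message body is JSON text
            if payload != previous_msg:
                handle_msg(strip_manager, payload)
                previous_msg = payload
            queue.deleteMessage(id=msg['id']).execute()
        except NoMessageInQueue:
            time.sleep(0.2)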
from pprint import pprint
import time
import sys

from rsmq import RedisSMQ

queue = RedisSMQ(host="127.0.0.1", qname="myqueue", port=6379, ns="rsmq")

# Delete Queue if it already exists, ignoring exceptions
queue.deleteQueue().exceptions(False).execute()

# Create Queue with default visibility timeout of 20 and delay of 0
# demonstrating here both ways of setting parameters
queue.createQueue(delay=0).vt(20).execute()

iteration = sys.argv[1]
count = 0
while count < int(iteration):
    count += 1
    message_id = queue.sendMessage(delay=0).message("Task number " + str(count)).execute()
    pprint({'queue_status': queue.getQueueAttributes().execute()})
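
# Consumer counterpart to the producer above (a sketch, assuming the same local
# Redis instance, queue name "myqueue" and namespace "rsmq"). receiveMessage()
# raises NoMessageInQueue once the queue is drained.
from rsmq import RedisSMQ
from rsmq.cmd import NoMessageInQueue

queue = RedisSMQ(host="127.0.0.1", port=6379, qname="myqueue", ns="rsmq")

while True:
    try:
        msg = queue.receiveMessage().execute()
        print("Received:", msg['message'])
        # Delete the message so it is not redelivered after the visibility timeout
        queue.deleteMessage(id=msg['id']).execute()
    except NoMessageInQueue:
        break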
class RSMQueue(object):
    _msg = []

    def __init__(self, qname, host=DEFAULT['server']):
        self.host = host
        self.qname = qname
        self.queue = RedisSMQ(host=host, qname=qname)
        self.consumer = None
        self.callback = None
        try:
            self.queue.createQueue(delay=0).maxsize(-1).vt(0).execute()
        except Exception as e:
            logging.error('[Exception] RSMQueue createQueue: %s', e)
            print('[Exception] RSMQueue createQueue: %s', e)

    def set_callback(self, callback):
        self.callback = callback

    def publish(self, message):
        message_id = self.queue.sendMessage(delay=0).message(message).execute()
        self._msg.append(message_id)
        if len(self._msg) > 1:
            try:
                self.queue.deleteMessage(id=self._msg[0]).execute()
            except Exception as e:
                logging.error('[Exception] RSMQueue publish: %s', e)
                print('[Exception] RSMQueue publish: %s', e)
            del self._msg[0]
        return message_id

    def deleteMessage(self, mid):
        return self.queue.deleteMessage(id=mid).execute()

    def subscribe1(self, qname, callback):
        self.consumer = RedisSMQConsumerThread(qname, callback, host=DEFAULT['server'])
        self.consumer.start()
        return self.consumer

    def receiveMessage(self, callback):
        try:
            id, message, rc, ts = self.queue.popMessage().execute()
            if callback and callable(callback):
                callback(message)
        except Exception as e:
            print('[Exception] receivemessage', e)

    def subscribe(self, callback, obj, freq=10):
        queue = self.queue

        def f(callback):
            while True:
                try:
                    rt = queue.popMessage().execute()
                    # print(rt)
                    if rt['id'] and callback and callable(callback):
                        callback(rt['message'], obj)
                except Exception as e:
                    # print('[Exception] receivemessage', e)
                    pass
                time.sleep(1 / freq)

        t = Thread(target=f, args=(callback,))
        t.start()
        return t

    def cancel_subscribe(self):
        if self.consumer:
            self.consumer.stop()

    def peak(self):
        def _peak(id, message, rc, ts):
            print("\t\tpeak", id, message, rc, ts)
            time.sleep(0.1)
            return False

        self.subscribe(_peak)
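
# Usage sketch for RSMQueue. It assumes the module defines
# DEFAULT = {'server': '127.0.0.1'} before the class body (the constructor's
# default argument reads DEFAULT['server']) and that Redis runs on that host.
def on_message(message, ctx):
    print("received:", message, "context:", ctx)

q = RSMQueue('telemetry')                                   # creates the queue if it is missing
worker = q.subscribe(on_message, {'chan': 'telemetry'}, freq=5)
q.publish('hello')                                          # older ids are pruned once more than one is tracked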
def main(argv=None):
    if argv is None:
        argv = sys.argv

    ''' Parse args and run producer '''
    parser = argparse.ArgumentParser()
    parser.add_argument("-q", "--queue", dest="queue", action="store",
                        default="queue",
                        help="queue name [default: %(default)s]")
    parser.add_argument("-n", "--namespace", dest="ns", action="store",
                        default="test",
                        help="queue namespace [default: %(default)s]")
    parser.add_argument(
        "-c", "--count", dest="count", action="store", type=int, default=0,
        help="number of messages to send. If less than 1, send continuously " +
             "[default: %(default)s]")
    parser.add_argument(
        "-i", "--interval", dest="interval", type=float, default=1.5,
        help="Interval, in seconds, to send [default: %(default)s]")
    parser.add_argument(
        "-d", "--delay", dest="delay", type=int, default=0,
        help="delay, in seconds, to send message with [default: %(default)s]")
    parser.add_argument("-v", "--visibility_timeout", dest="vt", type=int,
                        default=None,
                        help="Visibility Timeout [default: %(default)s]")
    parser.add_argument("--delete", dest="delete", action="store_true",
                        default=False,
                        help="If set, delete queue first")
    parser.add_argument("--no-trace", dest="trace", action="store_false",
                        default=True,
                        help="If set, hide trace messages")
    parser.add_argument("-H", dest="host", default="127.0.0.1",
                        help="Redis Host [default: %(default)s]")
    parser.add_argument("-P", dest="port", type=int, default=6379,
                        help="Redis Port [default: %(default)s]")

    # Parse command line args
    args = parser.parse_args()

    # Create RedisSMQ queue controller
    LOG.info(
        "Creating RedisSMQ controller for redis at %s:%s, using default queue: %s:%s",
        args.host, args.port, args.ns, args.queue)
    rsqm = RedisSMQ(qname=args.queue,
                    host=args.host,
                    port=args.port,
                    ns=args.ns,
                    vt=args.vt,
                    delay=args.delay,
                    trace=args.trace)

    if args.delete:
        rsqm.deleteQueue(qname=args.queue, quiet=True).exceptions(False).execute()

    # Create queue if it is missing. Swallow errors if already exists
    rsqm.createQueue(qname=args.queue, quiet=True).exceptions(False).execute()

    # Start Producing
    produce(rsqm, "%s:%s" % (args.ns, args.queue), args.count, args.interval)
def handle(self, *args, **options):
    # Load the configuration
    self.success("Booting the router by reading configuration...")
    # FIXME: Configuration needs to be better deployed (and not hardwired)
    with open("csm.yaml") as fh:
        config = yaml.load(fh.read(), Loader=yaml.FullLoader)
    bics = [x['bic'] for x in config['participants']]
    self.success("Found PSPs with BICs: {}".format(", ".join(bics)))

    # Set up queues for all the PSPs
    self.success("Setting up interface for each PSP...")
    for psp in config['participants']:
        bic = psp['bic']
        name = psp['name']
        for direction in ['send', 'recv']:
            qname = "{}_{}".format(bic, direction)
            queue = RedisSMQ(host="127.0.0.1", qname=qname)
            try:
                queue.createQueue(delay=0, vt=20, quiet=True).execute()
            except QueueAlreadyExists:
                pass
            self.QUEUES.setdefault(bic, {})
            self.QUEUES[bic][direction] = queue
        self.success("Interface set up for {} ({})".format(bic, name))

    # Start event loop trying to read messages from the different queues
    # FIXME: This is a completely naive way to do this, but it is
    # intentional and will be switched over to Kafka at a later
    # stage.
    self.success("Listening for payment packets...")
    while True:
        for bic, queues in self.QUEUES.items():
            # Receive a payment packet
            try:
                queue = queues['recv']
                msg = queue.receiveMessage().execute()

                # Process payload from YAML
                packet = yaml.safe_load(msg['message'])
                self.success("Payment packet received: {}".format(
                    self.format_payment(packet)))
                queue.deleteMessage(id=msg['id']).execute()
            except NoMessageInQueue:
                self.notice("No payment packets for {} [{}]".format(
                    bic, time.asctime()))
                continue

            # Authorise a payment packet; if not authorised just
            # drop the packet.
            # FIXME: The payment packet should be an object and we
            # should have methods for routing etc. around that. [Or
            # maybe not, as we have a routing service for the
            # routing. But the payment packet should certainly be
            # an object.]
            routserv = RoutingService()
            if not routserv.authorise(packet):
                # FIXME: Non-authorised packets should be returned
                # to sender. The router would need to have more in
                # the payment packet to describe what a returned
                # packet is. Therefore we will need to have
                # unified packet types.
                self.success("Payment packet authorisation failed: {}".format(
                    routserv.format_payment(packet)))
                continue  # we just drop the non-authorised packet

            self.success("Payment packet authorisation succeeded: {}".format(
                routserv.format_payment(packet)))

            # Route the packet by finding out what the destination
            # interface is.
            destination_bic = routserv.route(packet)
            if not destination_bic:
                self.error("No destination for payment packet {}".format(
                    routserv.format_payment(packet)))
                continue
            self.success("Routing payment packet to destination: {}".format(
                routserv.format_payment(packet)))

            # Pass the message along to the destination BIC.
            qname = "{}_{}".format(destination_bic, "send")
            queue = RedisSMQ(host="127.0.0.1", qname=qname)
            message_id = queue.sendMessage().message(
                yaml.safe_dump(packet)).execute()
            self.success("Payment packet sent: {}".format(
                routserv.format_payment(packet)))

        time.sleep(1)  # just so we don't use _all_ CPU
from pprint import pprint
import time

from rsmq import RedisSMQ

# Create controller.
# In this case we are specifying the host and default queue name
queue = RedisSMQ(host="127.0.0.1", qname="my-queue")

# Delete Queue if it already exists, ignoring exceptions
queue.deleteQueue().exceptions(False).execute()

# Create Queue with default visibility timeout of 20 and delay of 0
# demonstrating here both ways of setting parameters
queue.createQueue(delay=0).vt(20).execute()

i = 0
try:
    while True:
        i += 1
        # Send a message
        message_id = queue.sendMessage().message(f"Hello World {i}").execute()
        pprint({'queue_status': queue.getQueueAttributes().execute()})
        time.sleep(0.1)
except KeyboardInterrupt:
    print("ending the work")
class QueueProcessor:
    def __init__(self):
        self.config = ServiceConfig()
        self.logger = self.config.get_logger("redis_tasks")
        client = pymongo.MongoClient(f"mongodb://{self.config.mongo_host}:{self.config.mongo_port}")
        self.pdf_paragraph_db = client["pdf_paragraph"]
        self.results_queue = RedisSMQ(
            host=self.config.redis_host,
            port=self.config.redis_port,
            qname=self.config.results_queue_name,
        )

    def process(self, id, message, rc, ts):
        try:
            task = Task(**message)
        except ValidationError:
            self.logger.error(f"Not a valid message: {message}")
            return True

        self.logger.info(f"Valid message: {message}")

        try:
            extraction_data = extract_paragraphs(task)

            if not extraction_data:
                extraction_message = ExtractionMessage(
                    tenant=task.tenant,
                    task=task.task,
                    params=task.params,
                    success=False,
                    error_message="Error getting the xml from the pdf",
                )
                self.results_queue.sendMessage().message(extraction_message.dict()).execute()
                self.logger.error(extraction_message.json())
                return True

            results_url = f"{self.config.service_url}/get_paragraphs/{task.tenant}/{task.params.filename}"
            file_results_url = f"{self.config.service_url}/get_xml/{task.tenant}/{task.params.filename}"

            extraction_message = ExtractionMessage(
                tenant=extraction_data.tenant,
                task=task.task,
                params=task.params,
                success=True,
                data_url=results_url,
                file_url=file_results_url,
            )

            self.pdf_paragraph_db.paragraphs.insert_one(extraction_data.dict())
            self.logger.info(extraction_message.json())
            self.results_queue.sendMessage(delay=3).message(extraction_message.dict()).execute()
            return True
        except Exception:
            self.logger.error("error", exc_info=1)
            return True

    def subscribe_to_extractions_tasks_queue(self):
        while True:
            try:
                self.results_queue.createQueue().vt(120).exceptions(False).execute()

                extractions_tasks_queue = RedisSMQ(
                    host=self.config.redis_host,
                    port=self.config.redis_port,
                    qname=self.config.tasks_queue_name,
                )
                extractions_tasks_queue.createQueue().vt(120).exceptions(False).execute()

                self.logger.info(f"Connecting to redis: {self.config.redis_host}:{self.config.redis_port}")
                redis_smq_consumer = RedisSMQConsumer(
                    qname=self.config.tasks_queue_name,
                    processor=self.process,
                    host=self.config.redis_host,
                    port=self.config.redis_port,
                )
                redis_smq_consumer.run()
            except redis.exceptions.ConnectionError:
                self.logger.error(f"Error connecting to redis: {self.config.redis_host}:{self.config.redis_port}")
                sleep(20)
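
# Minimal entry point for the processor above (assumes ServiceConfig resolves the
# Redis/MongoDB endpoints and the task/results queue names used by the class).
if __name__ == "__main__":
    QueueProcessor().subscribe_to_extractions_tasks_queue()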