class SubscriberThread(threading.Thread):
    """
    This class uses the Subscriber in a way that handles the "slow
    subscriber" problem gracefully. The subscriber handles IO in a
    background thread and queues all received messages. The main thread
    needs only to call handler() to invoke the desired callback for each
    message tag received off the queue. In this way all callback
    functions are called from the main thread and messages are taken off
    of the network stack ASAP in the background thread.
    """

    def __init__(self, *args, **kwargs):
        threading.Thread.__init__(self)
        self.subscriber = Subscriber(*args, **kwargs)
        self.queue = Queue.Queue()
        self.methods = {}
        self.daemon = True

        # Local closure handed to the Subscriber; it only enqueues, so the
        # background IO thread never runs user callbacks itself.
        def queue(tag, message):
            self.queue.put((tag, message))

        self.callback = queue

    def subscribe(self, tag, fun, description=None):
        self.methods[tag] = fun
        self.subscriber.subscribe(tag, self.callback, description)

    def handler(self, block=True, timeout=None):
        tag, message = self.queue.get(block, timeout)
        self.methods[tag](tag, message)

    def run(self):
        self.subscriber.start()
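A minimal usage sketch of the pattern described in the docstring above; the on_status handler, the 'status' tag, and the zero-argument construction are illustrative assumptions, since the Subscriber constructor arguments are not shown here.

def on_status(tag, message):
    # Runs in the main thread, invoked by handler() for the 'status' tag.
    print('%s -> %s' % (tag, message))

st = SubscriberThread()            # real connection arguments omitted (assumption)
st.subscribe('status', on_status)  # register the callback for this tag
st.start()                         # background thread drains the socket into the queue

while True:
    st.handler()                   # blocks until a queued message arrives, then
                                   # dispatches it to on_status in the main thread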
def worker() -> None:
    sub = Subscriber(('main', 'bot'))
    logger.info('Bot worker is already running')
    while True:
        video_dict = sub.do_subscribe()
        if 'Msg' in video_dict:
            call_bot(video_dict)
def worker():
    sub = Subscriber(('main',))
    while True:
        data: dict = sub.do_subscribe()
        if data is not False:
            t = Thread(target=process_video, args=(data,), daemon=True)
            t.start()
def sub_thread():
    sub = Subscriber(('hls', ))
    while True:
        data: dict = sub.do_subscribe()
        if data is not False:
            worker = HlsGeneration(data)
            t = Thread(target=worker.call_hls_generation, daemon=True)
            t.start()
def start() -> None:
    sub = Subscriber(('main', 'cq_rescue'))
    logger.info('Cq worker is already running')
    while True:
        video_dict = sub.do_subscribe()
        if video_dict is not False:
            worker = CQWorker(video_dict)
            asyncio.run(worker.main())
def worker() -> None:
    sub = Subscriber(('upload',))
    while True:
        upload_dict = sub.do_subscribe()
        if upload_dict is not False:
            if upload_dict.get('Record', False):
                t = Thread(target=upload_record, args=(upload_dict,), daemon=True)
            else:
                t = Thread(target=upload_video, args=(upload_dict,), daemon=True)
            t.start()
def get_endpoint(self, endpoint):
    """
    Parse /instrument/channel into specific channel handlers.
    """
    if not endpoint.startswith('/'):
        return
    try:
        _, name, channel = endpoint.split('/', 3)
    except ValueError:
        return
    # Clients should not be able to create arbitrary instruments
    if name not in INSTRUMENTS:
        raise HTTPError(404, "Instrument %s is not online" % name)
    #print "Web connection to",name,channel,INSTRUMENTS[name],INSTRUMENTS[name].channel[channel]
    #print INSTRUMENTS[name].channel[channel].channel_state()
    # Don't allow control from the general web connection
    if channel != 'control':
        return lambda *args, **kw: Subscriber(
            INSTRUMENTS[name].channel[channel], *args, **kw)
def get_endpoint(self, endpoint):
    """
    Parse /instrument/channel into specific channel handlers.
    """
    if not endpoint.startswith('/'):
        return
    try:
        _, name, channel = endpoint.split('/', 3)
    except ValueError:
        return
    if name not in INSTRUMENTS:
        INSTRUMENTS[name] = instrument.Instrument(name=name)
    #print "NICE connection to",name,channel,INSTRUMENTS[name],INSTRUMENTS[name].channel[channel]
    #print INSTRUMENTS[name].channel[channel].channel_state()
    # There is role reversal with publisher and subscriber for the control
    # channel: the instrument is acting as a single subscriber for all
    # the web clients who are publishing commands to control it.
    if channel == 'control':
        return lambda *args, **kw: Subscriber(
            INSTRUMENTS[name].channel[channel], *args, **kw)
    elif channel in INSTRUMENTS[name].channel:
        return lambda *args, **kw: Publisher(
            INSTRUMENTS[name].channel[channel], *args, **kw)
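For orientation, a hedged sketch of how the factories returned by get_endpoint above might be consumed; the router object, the instrument name 'sans', and the 'log' channel are hypothetical and only illustrate that the control channel yields a Subscriber while other known channels yield a Publisher.

make_control = router.get_endpoint('/sans/control')   # hypothetical call site
if make_control is not None:
    control = make_control()   # Subscriber: the instrument consumes commands
                               # published by many web clients

make_log = router.get_endpoint('/sans/log')            # any other known channel
if make_log is not None:
    log_stream = make_log()    # Publisher: fans instrument events out to clients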
def subscriber():
    """Start Subscriber"""
    click.echo('Starting subscriber...')
    config = PubSubConfig.from_object(current_app.config)
    Subscriber(config).start()
import datetime, pymongo
from pubsub import Publisher, Subscriber

CNAME = 'messages'
database = pymongo.MongoClient()['pubsub']

def log(message):
    print(message)

p = Publisher(database, CNAME)
s = Subscriber(database, CNAME, callback=log)
# p.publish({ 'text': 'xxx yosldsjfsdh' })
s.listen()
from pubsub import Broker, Publisher, Subscriber

br = Broker('./store/')

p1 = Publisher('p1', br)
p1.addTopic('t1')
p2 = Publisher('p2', br)
p2.addTopic('t2')

c1 = Subscriber('c1', br)
c1.addTopic('t1')
c2 = Subscriber('c2', br)
c2.addTopic('t2')

p2.publish('t2', 'hello t2')
p1.publish('t1', 'hello t1')
#!/usr/bin/python
from message_bus import MessageBus
from pubsub import Publisher, Subscriber
from flask import Flask
import time

bus = MessageBus()
bus.add_topic('saleem')

publisher = Publisher('Pub1')
bus.bind('saleem', publisher)

sub1 = Subscriber('Sub1')
sub2 = Subscriber('Sub2')
bus.bind('saleem', sub1)
bus.bind('saleem', sub2)

publisher.publish('Hello Saleem')

for x in range(20):
    publisher.publish('Pub1 : %s: Counter ' % (x))
    time.sleep(1)

sub3 = Subscriber('Sub3')
bus.bind('saleem', sub3)
class CQWorker:

    def __init__(self, video_dict: dict) -> None:
        self.video_dict = video_dict
        self.end_sub = Subscriber(('cq', ))

    @staticmethod
    def formatter(event_info: dict) -> dict:

        def time_formatter() -> str:
            time_obj = datetime.fromtimestamp(raw_time)
            struct_time = datetime.strftime(time_obj, '%Y年%m月%d日%H:%M:%S')
            return struct_time

        msg = event_info['message']
        raw_time = event_info['time']
        time = time_formatter()
        struct_event_info = {'msg': msg, 'time': time}
        return struct_event_info

    def txt_recorder(self, event_info: dict):
        with open(f'{config["ddir"]}/{self.video_dict["Title"]}.txt', 'a') as f:
            f.write(f"{event_info['time']}: {event_info['msg']} \n")
        logger.warning(f"{event_info['time']}: {event_info['msg']}")

    async def ws_interaction(self) -> None:
        while True:
            try:
                async with websockets.connect(config['cq_ws_uri']) as ws:
                    while True:
                        is_end = self.end_record()
                        if is_end:
                            return
                        result = await ws.recv()
                        event_info = self.event_filter(json.loads(result))
                        if event_info:
                            struct_event_info = self.formatter(event_info)
                            self.txt_recorder(struct_event_info)
            except websockets.ConnectionClosedError:
                logger.error('Ws connection was broken')

    @staticmethod
    def event_filter(event_info: dict) -> Union[bool, dict]:
        if event_info['post_type'] != 'message':
            return False
        if event_info['user_id'] not in config['cq_record_list']:
            print(event_info)
            return False
        if '【' not in event_info['message']:
            return False
        return event_info

    def end_record(self) -> Union[bool, None]:
        target = self.end_sub.do_subscribe_nowait()
        if target == self.video_dict['Target']:
            logger.info(f'{self.video_dict["Title"]} Done')
            return True
        return None

    def upload_record(self, ddir: str) -> None:
        pub = Publisher()
        upload_dict = {
            'Title': self.video_dict['Title'] + '.txt',
            'Target': self.video_dict['Target'],
            'Path': f'{ddir}/{self.video_dict["Title"]}.txt',
            'User': self.video_dict['User'],
            'Record': True,
            'Origin_Title': self.video_dict['Origin_Title']
        }
        pub.do_publish(upload_dict, 'upload')

    async def main(self) -> None:
        user_config = get_user(self.video_dict['User'])
        if not user_config['record']:
            return None
        ddir = get_ddir(user_config)
        self.video_dict['Origin_Title'] = self.video_dict['Title']
        self.video_dict['Title'] = AdjustFileName(
            self.video_dict['Title']).adjust(ddir)
        await self.ws_interaction()
        self.upload_record(ddir)