def __init__(self):
    """Attach to the central /tmp/SVSqueue and build the message dispatch table."""
    # The queue manager is already running server-side; just connect.
    manager = QueueManager(address=('/tmp/SVSqueue'), authkey='gnarf')
    manager.connect()
    self.inQueue = manager.get_queue()

    # Dispatch table: (tag, handler[, arity, log-flag]) — extra entries are
    # forwarded to msgHandler.register unchanged.
    self.handleMsg = msgHandler(self.logfunc)
    registrations = [
        ('init', self.do_init),
        ('addClient', self.do_addClient),
        ('peerQuit', self.do_peerQuit),
        ('exit', self.do_exit),
        ('procDead', self.do_procDead),
        ('chatLobby', self.do_chatLobby),
        ('nameProposed', self.do_nameProposed),
        ('PINGsent', self.do_PINGsent),
        ('PINGreceived', self.do_PINGreceived),
        ('PONGreceived', self.do_PONGreceived),
        ('getUserList', self.do_getUserList),
        ('chatPrivate', self.do_chatPrivate, 3, False),
        ('shutdown', self.do_shutdown),
        ('syslog', self.do_syslog, 3, False),
    ]
    for entry in registrations:
        self.handleMsg.register(*entry)

    # Client bookkeeping and service state.
    self.clients = {}
    self.lastClient = 0
    self.SVG = SVGhandler()
    self.init = False
    self.keep_alive = True
def initialize(self, queue, lsize=50, timeout=60, **kwargs): if not queue: self.QM = QueueManager("0.0.0.0", port = 9998) self.queue = self.QM.Queue(queue_type="python_queue", **kwargs) else: self.QM = QueueManager("0.0.0.0", port = 9998) try: self.queue = self.QM.Queue(queue_type=queue, **kwargs) except Exception as e: print "queue type: `python_queue`, `redis_queue`" import traceback traceback.print_exc() self.asyn_collection = None self.lsize = lsize self.timeout = timeout self.l_list = [] #插入任务 self.u_list = [] #更新任务 #clear all old queue data while self.queue.qsize(): _ = self.queue.get() #set status running self.runable = True self.t = threading.Thread(target=self._run_single) self.t.start()
def initialize(self, queue, lsize=50, timeout=60, **kwargs): if not queue: self.QM = QueueManager("0.0.0.0", port=9998) self.queue = self.QM.Queue(queue_type="python_queue", **kwargs) else: self.QM = QueueManager("0.0.0.0", port=9998) try: self.queue = self.QM.Queue(queue_type=queue, **kwargs) except Exception as e: print "queue type: `python_queue`, `redis_queue`" import traceback traceback.print_exc() self.asyn_collection = None self.lsize = lsize self.timeout = timeout self.l_list = [] #插入任务 self.u_list = [] #更新任务 #clear all old queue data while self.queue.qsize(): _ = self.queue.get() #set status running self.runable = True self.t = threading.Thread(target=self._run_single) self.t.start()
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    """TCP server handling each request in its own thread; it also owns the
    process-wide QueueManager, which is created and started at construction."""

    def __init__(self, server_address, RequestHandlerClass):
        # Standard SocketServer setup (binds the listening socket).
        SocketServer.TCPServer.__init__(self, server_address, RequestHandlerClass)
        log_trace('I', '0016', "ThreadedTCPServer - constructor", detail="n/a")
        # Create the queue manager and kick off its processing loop.
        self.queueManager = QueueManager()
        log_trace('I', '0034', "QueueManager created", detail="n/a")
        self.queueManager.processing()
        log_trace('I', '0035', "QueueManager started", detail="n/a")
def main(): print ' *** start jenkins job script ***' option = get_args() logger = MyLogger.__call__().get_logger() logger.info("Start logger") queue_management_state = QueueManager(option["build_number"], option["jenkins_url"], option["job_name"]) queue_management_state.run()
def do_addClient(self, param):
    """Register a freshly connected client.

    param[2] is the client's named-pipe path, which doubles as its id.
    Connects to the client's queue manager and records the client with a
    generated placeholder name in 'init' state.
    """
    self.lastClient += 1
    client_id = param[2]  # path to named pipe == client id
    manager = QueueManager(address=(client_id), authkey='gnarf')
    manager.connect()
    self.clients[client_id] = {
        'name': '_SVuser' + str(self.lastClient),
        'queue': manager.get_queue(),
        'state': 'init',
    }
def addClient(self, conn):
    """Spin up a dedicated queue and worker process for a new connection."""
    queuepath = '/tmp/SVCqueue' + str(self.generateQueueNumber())
    manager = QueueManager(address=(queuepath), authkey='gnarf')
    manager.start()
    queue = manager.get_queue()
    worker = multiprocessing.Process(
        target=SVClient.processWorker,
        args=(queuepath, conn,))
    worker.start()
    # Keep everything needed to tear this client down again later.
    self.connections[conn] = (manager, queuepath, queue, worker)
def __init__(self):
    """Majordomo side: attach to the service queue and our own inbound
    queue, then wire up message and command dispatch tables."""
    self.ID = '/tmp/MDqueue'

    # Outbound: the central service queue. Inbound: our private queue.
    out_mgr = QueueManager(address=('/tmp/SVSqueue'), authkey='gnarf')
    out_mgr.connect()
    in_mgr = QueueManager(address=(self.ID), authkey='gnarf')
    in_mgr.connect()
    self.inQueue = in_mgr.get_queue()
    self.outQueue = out_mgr.get_queue()

    # Inbound message handlers.
    self.handleMsg = msgHandler()
    for tag, handler in [
            ('privateMsg', self.do_privateMsg),
            ('lobbyMsg', self.do_lobbyMsg),
            ('alert', self.do_alert),
            ('userJoined', self.do_userJoined),
            ('userLeft', self.do_userLeft),
            ('bye', self.do_bye),
            ('userList', self.do_userList),
            ('syslog', self.do_syslog)]:
        self.handleMsg.register(tag, handler)

    # Command handlers, each with its expected argument count.
    self.handleCmd = msgHandler(self.logfunc)
    for tag, handler, argc in [
            ('userList', self.do_cmd_userList, 2),
            ('master', self.do_cmd_master, 3),
            ('shutdown', self.do_cmd_shutdown, 2),
            ('log', self.do_cmd_log, 2)]:
        self.handleCmd.register(tag, handler, argc)

    self.userList = []
    self.masterList = []
    self.logList = []
    self.keep_alive = True
def validParams():
    """Extend the base QueueManager parameter set with the PBS template path."""
    params = QueueManager.validParams()
    # Template ships alongside this module.
    template_path = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'pbs_template')
    params.addParam('queue_template', template_path,
                    "Location of the PBS template")
    return params
class SVShandler:
    """Owns the SVService worker process and the queue used to talk to it."""

    def __init__(self):
        # Start the manager serving /tmp/SVSqueue, then launch the
        # SVService worker process that consumes from it.
        self.qm = QueueManager(address=('/tmp/SVSqueue'), authkey='gnarf')
        self.qm.start()
        self.queue = self.qm.get_queue()
        self.proc = multiprocessing.Process(
            target=SVService.processWorker,
            args=[])
        self.proc.start()

    def __del__(self):
        # Best-effort shutdown: ask the live worker to exit, wait for it,
        # then stop the queue manager.
        if self.proc.is_alive():
            self.send('bye', 'System shutdown', None)
            self.proc.join()
            self.qm.shutdown()

    def addClient(self, queuepath):
        """Announce a new client connection to the service."""
        self.send('addClient', 'new Connection', queuepath)

    def removeClient(self, queue):
        """Announce a lost peer to the service."""
        self.send('removeClient', 'Peer lost', queue)

    def send(self, msgtag, text, obj=None):
        """Enqueue a (tag, text, payload) triple for the service."""
        self.queue.put((msgtag, text, obj))

    def getQueue(self):
        return self.queue

    def cycle(self):
        """Return True while the worker lives; reap it and return False once dead."""
        if not self.proc.is_alive():
            self.proc.join()
            self.qm.shutdown()
            return False
        return True
def __init__(self):
    # Start the manager that owns the shared /tmp/SVSqueue endpoint and
    # grab its queue, then spawn the SVService worker process.
    self.qm = QueueManager(address=('/tmp/SVSqueue'), authkey='gnarf')
    self.qm.start()
    self.queue = self.qm.get_queue()
    self.proc = multiprocessing.Process(
        target=SVService.processWorker,
        args=[])
    self.proc.start()
def do_init(self, param):
    """One-time startup: launch the Majordomo helper and register it as a client."""
    if not self.init:
        self.MDqueuepath = '/tmp/MDqueue'
        # Queue manager used to talk to the Majordomo process.
        self.mqm = QueueManager(address=(self.MDqueuepath),
                                authkey='gnarf')
        self.mqm.start()
        self.MDqueue = self.mqm.get_queue()
        # Majordomo is tracked like any other client, but in 'master' state.
        self.clients[self.MDqueuepath] = {
            'name': 'Majordomo',
            'queue': self.MDqueue,
            'state': 'master'}
        self.MDproc = multiprocessing.Process(
            target=Majordomo.processWorker, args=[])
        self.MDproc.start()
        self.init = True
def mainfunction():
    """Bridge BPM messages from IBM MQ to RabbitMQ.

    Connects to both the IBM MQ server and RabbitMQ, pulls raw BPM payloads
    off the IBM queue and publishes them unchanged to the RabbitMQ `bpm`
    exchange. Connection channels are reused, and connection faults are
    retried rather than fatal.

    Two nested loops: the outer loop manages the IBM MQ connection, the
    inner loop fetches and forwards messages.
    FailToSleepSec -- sleep after an exception (including "NO_MSG").
    DisconnectSec  -- wait before reconnecting after a disconnect.
    """
    # === Local configuration, all from the environment ===
    FailToSleepSec = int(os.getenv('FailToSleepSec'))
    DisconnectSec = int(os.getenv('DisconnectSec'))

    # RabbitMQ settings.
    user = os.getenv('RABBITMQ_USER')
    password = os.getenv('RABBITMQ_PASSWORD')
    host = os.getenv('RABBITMQ_HOST')
    port = os.getenv('RABBITMQ_PORT')
    vhost = os.getenv('RABBITMQ_VHOST')
    exchange = os.getenv('RABBITMQ_BPMEXCHANGE')
    routing_key = os.getenv('RABBITMQ_ROUTING_KEY')
    queue = os.getenv('RABBITMQ_QUEUENAME')
    qm = QueueManager(user, password, host, port, vhost, True)
    # Declare/bind the target queue up front.
    qm.declare_queue_binding(exchange, queue, routing_key)

    # IBM MQ settings (note: user/password/host/port are rebound here).
    queue_manager = os.getenv('IBMQUEUE_MANAGER')
    channel = os.getenv("IBMCHANNEL")
    host = os.getenv("IBMHOST")
    port = os.getenv("IBMPORT")
    queue_name = os.getenv("IBMQUEUE_NAME")
    user = os.getenv('IBMUSER')
    password = os.getenv('IBMPASSWORD')
    conn_info = "%s(%s)" % (host, port)
    ibmmqi = IBMMQClientManager(queue_manager, channel, host, port,
                                queue_name, conn_info)

    keep_connect = True
    msg = ""
    connectmsg = ""
    exceptioncount = 0
    getmessagecount = 0
    connectcount = 0
    # Outer loop: (re)establish the IBM MQ connection.
    while keep_connect:
        try:
            ibmmqi.connect(user, password)
            keep_connect = False
            connectcount = connectcount + 1
            msg = connectmsg = "Connect success !! \nconnectcount:" + str(
                connectcount)
            DebugLog(msg, connectmsg, 'MqConnectSucces', '0')
        except Exception as pymqierror:
            keep_connect = True
            msg = connectmsg = "Connect fail !! connectcount:" + str(
                connectcount) + str(pymqierror)
            DebugLog(msg, connectmsg, 'MqConnectFail', '0')
            time.sleep(1)
        if keep_connect == False:
            # Connected in client mode; open the queue.
            ibmmqi.Queue()
            keep_running = True
            count = 1
            # Inner loop: fetch from IBM MQ and forward to RabbitMQ.
            while keep_running:
                msg = connectmsg = " keep_running is true!! "
                try:
                    msg_body = ibmmqi.getmessage()
                    msg = "Success!! " + msg_body
                    getmessagecount = getmessagecount + 1
                    connectmsg = ("IBMMQ getmessage success!! "
                                  "getmessagecount:" + str(getmessagecount))
                    DebugLog(msg, connectmsg, 'GetMesFormIbmMQ', '1')
                    try:
                        # Lazily reconnect RabbitMQ before publishing.
                        if (qm.is_connected()) == False:
                            qm.connect()
                        qm.publish(exchange, routing_key, msg_body)
                        connectmsg = ("RabbitMQ publisher success!! "
                                      "getmessagecount:" + str(getmessagecount))
                        DebugLog(msg, connectmsg, 'BpmToRabbbitMQ', '0')
                    except Exception as RabbitMQerror:
                        # Publish failed: reconnect and log; the IBM message
                        # is NOT re-queued here.
                        qm.connect()
                        msg = connectmsg = ":" + str(
                            count) + "RabbitMQerror Exception !! " + str(
                                RabbitMQerror)
                        DebugLog(msg, connectmsg, 'RabbitMqError', '0')
                    keep_running = True
                    keep_connect = False
                    count = 1
                except Exception as pymqierror:
                    # Covers NO_MSG as well; after 10 consecutive failures,
                    # drop the connection and go back to the outer loop.
                    exceptioncount = exceptioncount + 1
                    connectmsg = ":" + str(
                        count) + " Exception !! exceptioncount:" + str(
                            exceptioncount) + "-" + str(pymqierror)
                    if count > 9:
                        keep_running = False
                        keep_connect = True
                    else:
                        keep_running = True
                        keep_connect = False
                    count = count + 1
                    DebugLog(msg, connectmsg, 'IbmMQException', '0')
                    time.sleep(FailToSleepSec)
            # NOTE(review): nesting below reconstructed from a collapsed
            # one-line source — confirm against the original layout.
            if keep_running == False:
                keep_connect = True
                try:
                    msg = connectmsg = "ibmmqi.close()!! "
                    ibmmqi.close()
                except Exception as pymqicloseerr:
                    msg = connectmsg = "ibmmqi.close():Exception !! " + str(
                        pymqicloseerr)
                DebugLog(msg, connectmsg, 'IbmMqClose', '0')
                time.sleep(1)
                keep_connect = True
                try:
                    ibmmqi.disconnect()
                    msg = connectmsg = "ibmmqi.disconnect! \ngetmessagecount:" + str(
                        getmessagecount)
                except Exception as pymqidisconerr:
                    msg = connectmsg = "ibmmqi.disconnect:Exception !! " + str(
                        pymqidisconerr)
                DebugLog(msg, connectmsg, 'IbmMqDisconnect', '0')
                time.sleep(DisconnectSec)
def __init__(self, harness, params):
    """Initialise this scheduler: defer to the QueueManager base, then cache
    the harness, its parameters, and the active test-harness options."""
    QueueManager.__init__(self, harness, params)
    self.params = params
    self.harness = harness
    self.options = self.harness.getOptions()
def __init__(self, harness, params):
    """Initialise this scheduler: defer to the QueueManager base and keep
    the parameter set for later use."""
    QueueManager.__init__(self, harness, params)
    self.params = params
from flask import Flask, render_template, request, jsonify
from QueueManager import QueueManager

DEBUG_MODE = True

# Module-level app and shared queue manager used by all endpoints.
app = Flask(__name__)
queueManager = QueueManager()


@app.route('/')
def index():
    # Landing page.
    return render_template('index.html')


#### ENDPOINTS ####

# Form fields: user_id(int), name(str), doctor_name(str), scheduled_start_time(int)
@app.route('/schedule/add', methods=['POST'])
def schedule_add():
    user_id = None
    name = None
    doctor_name = None
    scheduled_start_time = None
    if request.form:
        # int() raises ValueError on malformed numeric fields.
        user_id = int(request.form.get('user_id'))
        name = request.form.get('name')
        doctor_name = request.form.get('doctor_name')
        scheduled_start_time = int(request.form.get('scheduled_start_time'))
    # NOTE(review): handler appears truncated in this chunk — the part that
    # uses these values and builds the response is not visible; confirm
    # against the full file.
def fetchdata():
    """Pull BPM messages from IBM MQ and (optionally) forward to RabbitMQ.

    Reads all connection settings from the environment, retries the IBM MQ
    connection until it succeeds, then drains messages, logging each attempt
    to Output.txt / Output1.txt.
    """
    # === Local configuration ===
    # RabbitMQ settings.
    user = os.getenv('RABBITMQ_USER')
    password = os.getenv('RABBITMQ_PASSWORD')
    host = os.getenv('RABBITMQ_HOST')
    port = os.getenv('RABBITMQ_PORT')
    vhost = os.getenv('RABBITMQ_VHOST')
    exchange = os.getenv('RABBITMQ_BPMEXCHANGE')
    routing_key = os.getenv('RABBITMQ_ROUTING_KEY')
    qm = QueueManager(user, password, host, port, vhost, True)
    # TODO: Add feature for switching between local and cloud app settings
    # (VCAP_SERVICES-based RabbitMQ lookup was removed/disabled here).
    # Initialise the queues.
    init_queue(qm, exchange)
    # TODO: Split main function for OOP.

    # IBM MQ settings (note: user/password/host/port are rebound here).
    queue_manager = os.getenv('IBMQUEUE_MANAGER')
    channel = os.getenv("IBMCHANNEL")
    host = os.getenv("IBMHOST")
    port = os.getenv("IBMPORT")
    queue_name = os.getenv("IBMQUEUE_NAME")
    user = os.getenv('IBMUSER')
    password = os.getenv('IBMPASSWORD')
    conn_info = "%s(%s)" % (host, port)
    # NOTE(review): stashing config as attributes on the pymqi module object
    # is unusual — presumably read elsewhere; verify.
    pymqi.queue_manager = queue_manager
    pymqi.channel = channel
    pymqi.host = host
    pymqi.port = port
    pymqi.queue_name = queue_name
    pymqi.conn_info = conn_info

    # Connect in client mode with username/password, retrying forever.
    keep_connect = True
    while keep_connect:
        try:
            qmgr = pymqi.connect(queue_manager, channel, conn_info, user,
                                 password)
            printmsg = str(datetime.datetime.now()) + " Connect success !! \n"
            keep_connect = False
        except Exception as pymqierror:
            printmsg = str(datetime.datetime.now()
                           ) + " Connect fail !! " + str(pymqierror)
            print(printmsg)
            with open("Output.txt", "a") as text_file:
                print(f"{printmsg}", file=text_file)
            with open("Output1.txt", "a") as text_file:
                print(f"{printmsg}", file=text_file)
            time.sleep(10)

    # Open the queue on the connected manager.
    ibmqueue = pymqi.Queue(qmgr, queue_name)

    keep_running = True
    count = 0
    # NOTE(review): nesting below reconstructed from a collapsed one-line
    # source — confirm against the original layout.
    while keep_running:
        try:
            # Messages are UTF-16 encoded on the wire.
            msg_body = ibmqueue.get().decode('utf-16')
            printmsg = str(
                datetime.datetime.now()) + " success!! " + msg_body
            printmsg1 = str(datetime.datetime.now()) + " success!! "
        except Exception as pymqierror:
            printmsg = str(
                datetime.datetime.now()) + " fail!! " + str(pymqierror)
            printmsg1 = str(
                datetime.datetime.now()) + " fail!! " + str(pymqierror)
            # Give up after more than 10 consecutive failures.
            if count > 10:
                keep_running = False
                keep_connect = True
            count = count + 1
        print(printmsg)
        with open("Output.txt", "a") as text_file:
            print(f"{printmsg}", file=text_file)
        with open("Output1.txt", "a") as text_file:
            print(f"{printmsg1}", file=text_file)
        time.sleep(10)
    ibmqueue.close()
    qmgr.disconnect()
def mainfunction(source):
    """Fetch BPM data, parse it, and push the parsed JSON to the web server.

    Two supported sources:
    (1) source contains 'RabbitMQ' — subscribe to the RabbitMQ queue and,
        via a consumer callback, parse each raw BPM message and forward it.
        Connection failures are logged and terminate the process (the CF
        platform restarts it).
    (2) source contains 'bpmOrg' or 'bpmJson' — replay BPM records from
        files under SourceLog/ (used for fault recovery / backfill).
        'bpmOrg' files hold raw BPM text that must be parsed first;
        'bpmJson' files hold already-parsed JSON and are forwarded as-is.
        Processed files are moved to DoneLog/.
    """
    if 'RabbitMQ'.lower() in source.lower():
        source = "source:" + source.lower().replace("mq", 'MQ').replace("r", 'R')
        DebugLog('', source, source, '0')

        def callback(ch, method, properties, body):
            # body arrives as bytes; decode, parse, forward.
            data = str(body, 'utf-8')
            json_str = ''
            msg = data
            try:
                DebugLog('', 'Get message from RabbitMQ Server!!',
                         'GetBpmFomMQ', '1')
                json_str = dataParsing(data)
                msg = str(json_str)
                parseStore(json_str)
            except Exception as error:
                DebugLog(msg, str(error), 'GetBpmFomMQFAIL', '0')

        try:
            # NOTE(review): user/password/host/port/vhost/queue_name are not
            # defined in this function's visible body — presumably module
            # globals; verify, otherwise this raises NameError.
            qm = QueueManager(user, password, host, port, vhost, True)
            channel = qm.channel
            channel.basic_consume(queue_name, callback, auto_ack=True)
            DebugLog('', 'Connected to RabbitMQ Server and Waiting for messages.',
                     'WaitBpmMsg', '0')
            print('Waiting for messages.')
            channel.start_consuming()
        except Exception as e:
            DebugLog('', 'pika.exceptions: cannot connect to RabbitMQ! \n'
                     + str(e.args), 'Alert_MqFail', '0')
            sys.exit('Error: cannot connect to RabbitMQ!')
    elif 'bpmOrg'.lower() in source.lower() or 'bpmJson'.lower() in source.lower():
        source = "source:" + source.lower()
        DebugLog('', source, source, '0')
        try:
            # SourceLog/ lives one level above this script.
            script_dir = os.path.dirname(__file__)  # absolute dir of the script
            script_dir = os.path.join(script_dir, '..')
            script_dir = os.path.join(script_dir, 'SourceLog')
            entries = os.listdir(script_dir)
        except Exception as e:
            DebugLog('', 'FileNotFound: ' + str(e.args[1]) + ' - '
                     + str(script_dir), 'FileNotFound', '0')
            sys.exit('Error: FileNotFound!!')
        else:
            if not os.path.exists('DoneLog'):
                try:
                    os.makedirs('DoneLog')
                except Exception as patherror:
                    print(str(patherror))
                    DebugLog('', str(patherror), 'MakedirFail', '0')
                    sys.exit('Error: Make DoneLog dir fail !!')
            # NOTE(review): nesting below reconstructed from a collapsed
            # one-line source — confirm against the original layout.
            for entry in entries:
                try:
                    bopenfile = False
                    sfileBpmList = ''
                    src_file_path = os.path.join(script_dir, entry)
                    dst_file_path = os.path.join(
                        script_dir.replace('SourceLog', 'DoneLog'),
                        'Done' + entry)
                    txt = ''
                    DebugLog('', src_file_path, entry, '0')
                    with open(src_file_path, 'r') as f:
                        txt = f.read()
                        bopenfile = True
                        f.close()
                    # Archive: timestamp any previous DoneLog copy, then
                    # move the source file into DoneLog.
                    if os.path.exists(dst_file_path):
                        os.rename(dst_file_path, dst_file_path
                                  + str(datetime.datetime.now().date()))
                    os.rename(src_file_path, dst_file_path)
                    if 'bpmOrg'.lower() in source.lower():
                        # Normalise raw BPM text and mark record boundaries
                        # with '@' so split() yields one record per element.
                        txt = txt.replace(" BPM", '@BPM')
                        txt = txt.replace("ENDBPM", 'ENDBPM@')
                        txt = txt.replace("\r", '')
                        txt = txt.replace("\n", '\r\n')
                        txt = txt.replace("\r\n\r\n", '\r\n')
                    sfileBpmList = txt.split("@", )
                except Exception as patherror:
                    DebugLog('', str(patherror), 'Alert_pathError', '0')
                    raise patherror
                else:
                    if sfileBpmList != '' and bopenfile == True:
                        ifileBpmcount = 0
                        for ifileBpmList in range(len(sfileBpmList)):
                            if 'BPM' in sfileBpmList[ifileBpmList]:
                                ifileBpmcount = ifileBpmcount + 1
                                json_str = ''
                                data = sfileBpmList[ifileBpmList]
                                try:
                                    msg = data
                                    # 'Rawdata' marks already-parsed records.
                                    if 'Rawdata' not in sfileBpmList[ifileBpmList]:
                                        json_str = dataParsing(data)
                                        msg = str(json_str)
                                    else:
                                        json_str = data
                                    DebugLog('', 'Get message from file!! ',
                                             'GetBpmFomFile', '1')
                                    parseStore(json_str)
                                except Exception as error:
                                    DebugLog(msg, str(error),
                                             'BPMToPortalFail', '0')
                            else:
                                print(ifileBpmcount,
                                      "sfileBpmList:" + str(len(sfileBpmList)),
                                      json_str)
import sys
sys.path.insert(0, '../qless/')
from QueueManager import QueueManager
from FirebaseManager import FirebaseManager

# Shared managers used by this reset script.
queueManager = QueueManager(sync_id_generators=False)
firebaseManager = FirebaseManager()

# Time constants — all in milliseconds.
hour = 1000 * 60 * 60
half_hour = 1000 * 60 * 30


# Clear Firebase data.
def clear_data():
    """Wipe every queue and user collection in Firebase back to empty."""
    print("clearing all data...")
    firebaseManager.update_queue("walk_in", [])
    print(" walk_in cleared")
    firebaseManager.update_queue("doctor_hudson", [])
    print(" doctor_hudson queue cleared")
    firebaseManager.update_queue("doctor_martin", [])
    print(" doctor_martin queue cleared")
    firebaseManager.update_now_paging([])
    print(" now paging cleared")
    firebaseManager.update_seen_users([])
    print(" seen users cleared")
    firebaseManager.update_users([])
    print(" users cleared")
    print("all data cleared")
class Collection(object):
    """Mongo collection wrapper with optional asynchronous (queued, batched)
    inserts and updates handled by a single background worker thread."""

    def __init__(self, collection, queue=None, **kwargs):
        self.collection = collection     # underlying pymongo collection
        self.runable = False             # worker thread not started yet
        self._queue_set = queue          # queue backend name, for lazy init
        self.kwargs = kwargs             # extra args for QueueManager.Queue

    def initialize(self, queue, lsize=50, timeout=60, **kwargs):
        """Create the work queue and start the background worker thread."""
        if not queue:
            self.QM = QueueManager("0.0.0.0", port=9998)
            self.queue = self.QM.Queue(queue_type="python_queue", **kwargs)
        else:
            self.QM = QueueManager("0.0.0.0", port=9998)
            try:
                self.queue = self.QM.Queue(queue_type=queue, **kwargs)
            except Exception as e:
                print "queue type: `python_queue`, `redis_queue`"
                import traceback
                traceback.print_exc()
        self.asyn_collection = None
        self.lsize = lsize
        self.timeout = timeout
        self.l_list = []  # pending insert tasks
        self.u_list = []  # pending update tasks
        # Clear all old queue data.
        while self.queue.qsize():
            _ = self.queue.get()
        # Set status running and start the worker.
        self.runable = True
        self.t = threading.Thread(target=self._run_single)
        self.t.start()

    @staticmethod
    def _get_dict(ob):
        # Object dict minus the change-tracking snapshot, if present.
        if not hasattr(ob, "_origin"):
            return ob.__dict__
        else:
            dict1 = dict(ob.__dict__)
            dict1.pop("_origin")
            return dict1

    @staticmethod
    def _get_update_and_remove_dict(ob):
        """Diff the object against its _origin snapshot.

        Returns [update_dict, remove_dict]: fields to $set and to $unset.
        Side effect: deletes ob._origin.
        """
        origin_hash = ob._origin
        delattr(ob, "_origin")
        now_hash = hash_object(ob)
        set_origin_hash_key = set(origin_hash.keys())
        set_now_hash_key = set(now_hash.keys())
        # Fields removed since the snapshot -> $unset.
        remove_key = set_origin_hash_key - set_now_hash_key
        remove_dict = dict()
        for i in remove_key:
            remove_dict[i] = 1
        # Fields present in both but changed -> $set.
        eq_key = set_origin_hash_key & set_now_hash_key
        update_dict = dict()
        for i in eq_key:
            if origin_hash[i] != now_hash[i]:
                update_dict[i] = getattr(ob, i)
        # Newly added fields -> $set.
        add_key = set_now_hash_key - set_origin_hash_key
        for i in add_key:
            update_dict[i] = getattr(ob, i)
        return [update_dict, remove_dict]

    def set_collection(self, client, db, collection):
        # Rebind to a different database/collection on an existing client.
        self.collection = client.get_database(db).get_collection(collection)

    def qsize(self):
        if hasattr(self, "queue"):
            return self.queue.qsize()
        else:
            raise Exception("not use asyn feature, have no queue")

    def insert(self, ob):
        # Synchronous insert.
        self.collection.insert_one(self._get_dict(ob))

    def insert_asyn(self, ob, lsize=50, timeout=60):
        # Asynchronous insert; lazily initialise the worker on first use.
        if not self.runable:
            self.initialize(queue=self._queue_set, lsize=lsize,
                            timeout=timeout, **self.kwargs)
        self.queue.put([self.collection, "insert", ob])

    def update(self, ob):
        # Synchronous update.
        if not hasattr(ob, "_id"):
            raise Exception("not a normal mongo item")
        self._real_update(ob)

    def update_asyn(self, ob, lsize=50, timeout=60):
        # Asynchronous update; lazily initialise the worker on first use.
        if not self.runable:
            self.initialize(queue=self._queue_set, lsize=lsize,
                            timeout=timeout, **self.kwargs)
        self.queue.put([self.collection, "update", ob])

    def find(self, json=dict(), item=dict(), limit=0, skip=0):
        # Query; yields wrapped objects (generator).
        if not limit:
            if not item:
                result = self.collection.find(json).skip(skip)
            else:
                result = self.collection.find(json, item).skip(skip)
        else:
            if not item:
                result = self.collection.find(json).skip(skip).limit(limit)
            else:
                result = self.collection.find(json, item).skip(skip).limit(limit)
        if not result:
            yield None
        else:
            for item in result:
                yield obj(**item)

    def find_one(self, json=dict(), item=dict()):
        # Find a single item; returns a wrapped object or None.
        if not item:
            result = self.collection.find_one(json)
        else:
            result = self.collection.find_one(json, item)
        if not result:
            return None
        else:
            return obj(**result)

    def close(self):
        """Wait for pending work, stop the worker, shut the manager down."""
        # Not using the asyn feature: nothing to do.
        if not self.runable:
            pass
        # Using asyn: drain, signal stop, join, shut down.
        else:
            while self.queue.qsize():
                # Wait for unfinished tasks.
                time.sleep(0.2)
            # NOTE(review): nesting below reconstructed from a collapsed
            # one-line source — confirm against the original layout.
            if self.runable:
                self.queue.put("X")
                if self.t:
                    self.t.join()
                    if hasattr(self, 'QM'):
                        self.QM.shutdown()
                    return
                else:
                    return
            else:
                return

    def _run_last(self):
        # Flush the batches accumulated in the previous loop iteration.
        if self.l_list:
            # Batched insert.
            self._real_insert_asyn(self.l_list)
            self.l_list = []
        else:
            pass
        if self.u_list:
            # Per-object updates.
            for ob in self.u_list:
                self._real_update(ob)
            self.u_list = []
        else:
            pass

    def _get_size(self):
        # Batch size for this round, capped at lsize.
        if self.queue.qsize() > self.lsize:
            size = self.lsize
        else:
            if self.queue.qsize():
                size = self.queue.qsize()
            else:
                # Empty queue: block on one item so the next arrival
                # wakes the worker.
                size = 1
        return size

    def _run_single(self):
        # Worker loop: drain up to _get_size() items, batching inserts and
        # updates; stops on the "X" sentinel or on a get() timeout.
        while self.runable:
            self._run_last()
            size = self._get_size()
            for i in xrange(size):
                try:
                    item = self.queue.get(timeout=self.timeout)
                    if isinstance(item, str):
                        if item == "X":
                            # "X" is the stop signal.
                            self.runable = False
                            break
                    elif isinstance(item, list):
                        # [collection, mark, object].
                        collection, mark, ob = item
                        if i == 0:
                            # First item of the round fixes the batch's
                            # target collection.
                            self.asyn_collection = collection
                        if self.asyn_collection == collection:
                            # Same collection for the whole round.
                            if mark == "insert":
                                self.l_list.append(self._get_dict(ob))
                            elif mark == "update":
                                self.u_list.append(ob)
                        else:
                            # Different collection: push the item back and
                            # end the round.
                            self.queue.put_left(item)
                            break
                    else:
                        raise Exception("Error Queue message:\t" + item)
                except Empty:
                    self.runable = False
                    break

    def _real_insert_asyn(self, l_list):
        self.collection.insert_many(l_list)

    def _real_update(self, ob):
        # Apply the snapshot diff as $set / $unset operations.
        if not hasattr(ob, "_id"):
            raise Exception("not a normal mongo item")
        elif not hasattr(ob, "_origin"):
            raise Exception(
                "have no _origin data, can't use update_asyn callable")
        update_dict, remove_dict = self._get_update_and_remove_dict(ob)
        if update_dict and remove_dict:
            self.collection.update_one({"_id": ob._id}, {
                "$set": update_dict,
                "$unset": remove_dict
            })
        elif update_dict:
            self.collection.update_one({"_id": ob._id}, {"$set": update_dict})
        elif remove_dict:
            self.collection.update_one({"_id": ob._id}, {"$unset": remove_dict})
elif event.type == 'Unfollow': if (event.to_id in self.server.follows and event.from_id in self.server.follows[event.to_id]): self.server.follows[event.to_id].remove(event.from_id) elif event.type == 'Broadcast': event.notify = list(self.server.users.keys()) elif event.type == 'Status': if event.from_id in self.server.follows: event.notify = self.server.follows[event.from_id] """ QueueManager.addEvent(event) class UnknownEvent(Exception): pass class Event(object): events = { 'F': 'Follow', 'U': 'Unfollow', 'B': 'Broadcast', 'P': 'Private', 'S': 'Status'
def setup(client: QueueManager) -> None:
    # discord.py extension entry point: attach the commands cog to the bot.
    client.add_cog(CommandsCog(client))
def validParams():
    # Extend the inherited QueueManager parameter set with the location of
    # the PBS batch-script template that ships next to this module.
    params = QueueManager.validParams()
    params.addParam('queue_template',
                    os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'pbs_template'),
                    "Location of the PBS template")
    return params
class SVService:
    """Central chat service: consumes messages from /tmp/SVSqueue, tracks
    connected clients, and routes lobby/private traffic between them."""

    def __init__(self):
        # Attach to the already-started service queue (see SVShandler).
        inQueueManager = QueueManager(address=('/tmp/SVSqueue'),
                                      authkey='gnarf')
        inQueueManager.connect()
        self.inQueue = inQueueManager.get_queue()
        # Message dispatch table; extra args are arity / logging flags.
        self.handleMsg = msgHandler(self.logfunc)
        self.handleMsg.register('init', self.do_init)
        self.handleMsg.register('addClient', self.do_addClient)
        self.handleMsg.register('peerQuit', self.do_peerQuit)
        self.handleMsg.register('exit', self.do_exit)
        self.handleMsg.register('procDead', self.do_procDead)
        self.handleMsg.register('chatLobby', self.do_chatLobby)
        self.handleMsg.register('nameProposed', self.do_nameProposed)
        self.handleMsg.register('PINGsent', self.do_PINGsent)
        self.handleMsg.register('PINGreceived', self.do_PINGreceived)
        self.handleMsg.register('PONGreceived', self.do_PONGreceived)
        self.handleMsg.register('getUserList', self.do_getUserList)
        self.handleMsg.register('chatPrivate', self.do_chatPrivate, 3, False)
        self.handleMsg.register('shutdown', self.do_shutdown)
        self.handleMsg.register('syslog', self.do_syslog, 3, False)
        # Client bookkeeping and service state.
        self.clients = {}
        self.lastClient = 0
        self.SVG = SVGhandler()
        self.init = False
        self.keep_alive = True

    def __del__(self):
        del self.inQueue
        del self.handleMsg
        del self.clients
        #if self.MDproc.is_alive():
        #self.sendClient(self.MDqueuepath, 'bye', 'system shutdown')
        # Reap the Majordomo process and its queue manager.
        self.MDproc.join()
        self.mqm.shutdown()

    def cycle(self):
        """Handle one inbound message; return False once 'bye' arrives."""
        msg = self.inQueue.get()  # blocking get
        self.performInQueueMsg(msg)
        if msg[0] == 'bye':
            print 'SVService: Bye'
            return False
        return self.keep_alive

    def sendClient(self, ID, msgtag, text, obj=None):
        # NOTE(review): local `msg` is built but never used.
        msg = (msgtag, text, obj)
        self.clients[ID]['queue'].put((msgtag, text, obj))

    def getSenderInfo(self, ID):
        return {'name': self.clients[ID]['name'], 'ID': ID}

    def removeClient(self, clientID, msg):
        """Drop a client and notify the remaining visible users."""
        clientName = self.clients[clientID]['name']
        del self.clients[clientID]['name']
        del self.clients[clientID]['queue']
        del self.clients[clientID]['state']
        del self.clients[clientID]
        # Names starting with '_' are placeholders, not announced users.
        for ID in self.clients.keys():
            if not self.clients[ID]['name'][0] == '_':
                self.sendClient(ID, 'userLeft', msg, clientID)

    def performInQueueMsg(self, msg):
        if not self.handleMsg.performMsg(msg):
            print '[SVService] unknown cmd: ' + msg[0]

    def logfunc(self, msg):
        # Forward internal events to Majordomo's syslog.
        self.sendClient(self.MDqueuepath, 'syslog', 'event from SVService',
                        str(msg))

    def do_init(self, param):
        """One-time startup: launch Majordomo and register it as a client."""
        if not self.init:
            self.MDqueuepath = '/tmp/MDqueue'
            self.mqm = QueueManager(address=(self.MDqueuepath),
                                    authkey='gnarf')
            self.mqm.start()
            self.MDqueue = self.mqm.get_queue()
            self.clients[self.MDqueuepath] = {
                'name': 'Majordomo',
                'queue': self.MDqueue,
                'state': 'master'}
            self.MDproc = multiprocessing.Process(
                target=Majordomo.processWorker, args=[])
            self.MDproc.start()
            self.init = True

    def do_addClient(self, param):
        """Register a new client; param[2] (named-pipe path) is its id."""
        self.lastClient += 1
        queuepath = param[2]  # we use path to named pipe as client id
        m = QueueManager(address=(queuepath), authkey='gnarf')
        m.connect()
        queue = m.get_queue()
        self.clients[queuepath] = {
            'name': '_SVuser' + str(self.lastClient),
            'queue': queue,
            'state': 'init'}

    def do_peerQuit(self, param):
        clientID = param[2]
        if clientID in self.clients:
            self.removeClient(clientID, 'peer disconnected')

    def do_exit(self, param):
        clientID = param[2]
        if clientID in self.clients:
            self.removeClient(clientID, 'user left us')

    def do_procDead(self, param):
        clientID = param[2]
        if clientID in self.clients:
            self.removeClient(clientID,
                              'process dead - this should never happen')

    def do_chatLobby(self, param):
        # Broadcast a lobby message to every client (including Majordomo).
        text = param[1]
        senderID = param[2]
        for ID in self.clients.keys():
            self.sendClient(ID, 'lobbyMsg', text,
                            self.getSenderInfo(senderID))

    def do_nameProposed(self, param):
        """Validate a proposed user name; accept it or send a denial reason."""
        proposedName = param[1]
        clientID = param[2]
        if proposedName in [x['name'] for x in self.clients.values()]:
            self.sendClient(clientID, 'clientNameDenied',
                            'a user with this name is already online')
        elif len(proposedName) > 15:
            self.sendClient(clientID, 'clientNameDenied',
                            'name too long')
        elif len(proposedName) < 3:
            self.sendClient(clientID, 'clientNameDenied',
                            'name too short')
        elif proposedName[0] == '_':
            self.sendClient(clientID, 'clientNameDenied',
                            'name must not begin with an underscore')
        elif ':' in proposedName or ' ' in proposedName:
            self.sendClient(clientID, 'clientNameDenied',
                            'name must not contain : or space')
        else:
            # Accepted: store the name and announce the user to all
            # clients with non-placeholder names.
            self.clients[clientID]['name'] = proposedName
            self.sendClient(clientID, 'clientNameAccepted',
                            proposedName)
            for sink in self.clients.keys():
                if not self.clients[sink]['name'][0] == '_':
                    self.sendClient(sink, 'userJoined', 'user joined',
                                    self.getSenderInfo(clientID))

    def do_PINGsent(self, param):
        pass

    def do_PINGreceived(self, param):
        clientID = param[2]
        self.sendClient(clientID, 'sendPONG', 'send PONG to peer')

    def do_PONGreceived(self, param):
        pass

    def do_getUserList(self, param):
        # Placeholder names are hidden from everyone except Majordomo.
        clientID = param[2]
        userList = [self.getSenderInfo(x)
                    for x in self.clients.keys()
                    if (not self.clients[x]['name'][0] == '_')
                    or (clientID == self.MDqueuepath)]
        self.sendClient(clientID, 'userList', '', userList)

    def do_chatPrivate(self, param):
        msg = param[1]
        sourceID, sinkID = param[2]
        self.sendClient(sinkID, 'privateMsg', msg,
                        self.getSenderInfo(sourceID))

    def do_shutdown(self, param):
        # Tell Majordomo goodbye and stop the main loop.
        self.sendClient(self.MDqueuepath, 'bye', 'system shutdown')
        self.keep_alive = False

    def do_syslog(self, param):
        msgtag, msg, userID = param
        self.sendClient(self.MDqueuepath, 'syslog', 'event from ' + userID,
                        msg)
def __init__(self, queuepath, sock): self.ID = queuepath outQueueManager = QueueManager(address=('/tmp/SVSqueue'), \ authkey='gnarf') outQueueManager.connect() inQueueManager = QueueManager(address=(queuepath), \ authkey='gnarf') inQueueManager.connect() self.inQueue = inQueueManager.get_queue() self.outQueue = outQueueManager.get_queue() self.sock = sock self.inBuffer = '' self.handleMsg = msgHandler(self.logfunc) self.handleMsg.register('sock', self.do_sock) self.handleMsg.register('lobbyMsg', self.do_lobbyMsg) self.handleMsg.register('clientNameAccepted', \ self.do_clientNameAccepted) self.handleMsg.register('clientNameDenied', \ self.do_clientNameDenied) self.handleMsg.register('sendPONG', self.do_sendPONG) self.handleMsg.register('userList', self.do_userList) self.handleMsg.register('bye', self.do_bye) self.handleMsg.register('privateMsg', self.do_privateMsg, 3, False) self.handleMsg.register('userLeft', self.do_userLeft) self.handleMsg.register('userJoined', self.do_userJoined) self.handleProtIn = msgHandler() self.handleProtIn.register(210, self.do_210_in_proposeName, 2) self.handleProtIn.register(401, self.do_401_in_listRooms, 1) self.handleProtIn.register(406, self.do_406_in_newRoom, 1) self.handleProtIn.register(501, self.do_501_in_chatLobby) self.handleProtIn.register(502, self.do_502_in_chatPrivate) self.handleProtIn.register(505, self.do_505_in_getUserList, 1) self.handleProtIn.register(602, self.do_602_in_ackProt, 2) self.handleProtIn.register(603, self.do_603_in_PING, 1) self.handleProtIn.register(604, self.do_604_in_PONG, 1) self.handleProtIn.register(699, self.do_699_in_exit, 1) self.handleProtOut = msgHandler() self.handleProtOut.register(101, self.do_ProtOut_trivial) self.handleProtOut.register(302, self.do_ProtOut_trivial) self.handleProtOut.register(303, self.do_ProtOut_trivial) self.handleProtOut.register(305, self.do_ProtOut_trivial) self.handleProtOut.register(306, self.do_ProtOut_trivial) self.handleProtOut.register(503, 
self.do_503_out_chatLobby) self.handleProtOut.register(504, self.do_504_out_chatPrivate) self.handleProtOut.register(506, self.do_ProtOut_trivial) self.handleProtOut.register(507, self.do_507_out_userEntry) self.handleProtOut.register(508, self.do_ProtOut_trivial) self.handleProtOut.register(509, self.do_509_out_userJoined) self.handleProtOut.register(510, self.do_510_out_userLeft) self.handleProtOut.register(601, self.do_601_out_serverHello) self.handleProtOut.register(603, self.do_603_out_PING) self.handleProtOut.register(604, self.do_ProtOut_trivial) self.handleProtOut.register(697, self.do_ProtOut_trivial) self.FSMsymbols = {} self.FSM = StateMachine(self.FSMsymbols) self.FSM.add_state('init', self.st_init) self.FSM.set_state('init') self.FSM.add_state('prot_proposed', self.st_prot_proposed) self.FSM.add_state('prot_accepted', self.st_prot_accepted) self.FSM.add_state('name_asked', self.st_name_asked) self.FSM.add_state('online', self.st_online) self.userList = [] self.firstCycle = True self.keep_alive = True
class Collection(object):
    """MongoDB collection wrapper with optional asynchronous operation.

    Synchronous insert/update/find go straight to the wrapped pymongo-style
    collection. insert_asyn/update_asyn lazily start a background worker
    thread that drains a work queue and applies operations in batches.
    """

    def __init__(self, collection, queue=None, **kwargs):
        # collection: pymongo-style collection object used for all operations
        # queue:      queue_type name remembered for lazy initialize()
        # kwargs:     extra arguments forwarded to QueueManager.Queue()
        self.collection = collection
        self.runable = False      # True while the async worker thread runs
        self._queue_set = queue
        self.kwargs = kwargs

    def initialize(self, queue, lsize=50, timeout=60, **kwargs):
        """Start the async machinery.

        queue   -- queue_type name ("python_queue" or "redis_queue"); a falsy
                   value selects "python_queue"
        lsize   -- maximum number of queued operations handled per batch
        timeout -- seconds the worker blocks on an empty queue before exiting

        Creates the work queue, drains any stale entries left from a previous
        run, then launches the worker thread.
        """
        self.QM = QueueManager("0.0.0.0", port=9998)
        queue_type = queue if queue else "python_queue"
        try:
            self.queue = self.QM.Queue(queue_type=queue_type, **kwargs)
        except Exception:
            print("queue type: `python_queue`, `redis_queue`")
            import traceback
            traceback.print_exc()
            # Fail loudly: the original swallowed this error and returned
            # with self.queue unset, guaranteeing a confusing AttributeError
            # on the very next queue operation.
            raise
        self.asyn_collection = None
        self.lsize = lsize
        self.timeout = timeout
        self.l_list = []  # pending insert documents for the current batch
        self.u_list = []  # pending update objects for the current batch
        # clear all old queue data
        while self.queue.qsize():
            _ = self.queue.get()
        # set status running and start the worker
        self.runable = True
        self.t = threading.Thread(target=self._run_single)
        self.t.start()

    @staticmethod
    def _get_dict(ob):
        """Return ob.__dict__ minus the internal `_origin` snapshot.

        Copies the dict when `_origin` is present so the object itself
        keeps its snapshot attribute.
        """
        if not hasattr(ob, "_origin"):
            return ob.__dict__
        doc = dict(ob.__dict__)
        doc.pop("_origin")
        return doc

    @staticmethod
    def _get_update_and_remove_dict(ob):
        """Diff ob against its `_origin` hash snapshot.

        Returns [update_dict, remove_dict] where update_dict holds changed
        and newly-added fields and remove_dict marks deleted fields (for
        $unset). Side effect: deletes the `_origin` attribute from ob so
        hash_object() sees only real fields.
        """
        origin_hash = ob._origin
        delattr(ob, "_origin")
        now_hash = hash_object(ob)
        origin_keys = set(origin_hash.keys())
        now_keys = set(now_hash.keys())
        # fields that disappeared -> $unset
        remove_dict = dict((k, 1) for k in origin_keys - now_keys)
        update_dict = dict()
        # fields present in both but with a changed hash -> $set
        for k in origin_keys & now_keys:
            if origin_hash[k] != now_hash[k]:
                update_dict[k] = getattr(ob, k)
        # brand-new fields -> $set
        for k in now_keys - origin_keys:
            update_dict[k] = getattr(ob, k)
        return [update_dict, remove_dict]

    def set_collection(self, client, db, collection):
        """Repoint the wrapper at client[db][collection]."""
        self.collection = client.get_database(db).get_collection(collection)

    def qsize(self):
        """Number of pending async operations; raises when async is unused."""
        if hasattr(self, "queue"):
            return self.queue.qsize()
        raise Exception("not use asyn feature, have no queue")

    def insert(self, ob):
        # synchronous insert
        self.collection.insert_one(self._get_dict(ob))

    def insert_asyn(self, ob, lsize=50, timeout=60):
        # asynchronous insert; lazily start the worker on first use
        if not self.runable:
            self.initialize(queue=self._queue_set, lsize=lsize,
                            timeout=timeout, **self.kwargs)
        self.queue.put([self.collection, "insert", ob])

    def update(self, ob):
        # synchronous update of a previously fetched item
        if not hasattr(ob, "_id"):
            raise Exception("not a normal mongo item")
        self._real_update(ob)

    def update_asyn(self, ob, lsize=50, timeout=60):
        # asynchronous update; lazily start the worker on first use
        if not self.runable:
            self.initialize(queue=self._queue_set, lsize=lsize,
                            timeout=timeout, **self.kwargs)
        self.queue.put([self.collection, "update", ob])

    def find(self, json=None, item=None, limit=0, skip=0):
        """Query the collection; yields each result wrapped as an `obj`.

        json  -- filter document (defaults to match-all)
        item  -- projection document (defaults to all fields)
        limit -- 0 means no limit
        skip  -- number of leading results to skip
        """
        if json is None:
            json = dict()
        if item:
            cursor = self.collection.find(json, item).skip(skip)
        else:
            cursor = self.collection.find(json).skip(skip)
        if limit:
            cursor = cursor.limit(limit)
        # Iterate the cursor directly: an empty result simply yields nothing.
        # The original tested `if not result:` on the cursor, but pymongo
        # cursors do not implement truth-value testing (it raises
        # NotImplementedError), and the branch also shadowed the `item`
        # parameter with its loop variable.
        for doc in cursor:
            yield obj(**doc)

    def find_one(self, json=None, item=None):
        """Return the first matching document as an `obj`, or None."""
        if json is None:
            json = dict()
        if item:
            result = self.collection.find_one(json, item)
        else:
            result = self.collection.find_one(json)
        if not result:
            return None
        return obj(**result)

    def close(self):
        """Flush pending async work and stop the worker thread.

        No-op when the async machinery was never started. Waits for the
        queue to drain, sends the "X" stop sentinel, joins the worker and
        shuts the queue manager down.
        """
        if not self.runable:
            return
        while self.queue.qsize():
            # wait for outstanding operations to drain
            time.sleep(0.2)
        # The worker may have stopped itself on timeout while we drained.
        if self.runable:
            self.queue.put("X")
            if self.t:
                self.t.join()
            if hasattr(self, 'QM'):
                self.QM.shutdown()

    def _run_last(self):
        """Flush the batches collected during the previous worker cycle."""
        if self.l_list:
            # inserts go out as one bulk write
            self._real_insert_asyn(self.l_list)
            self.l_list = []
        if self.u_list:
            # updates are applied one by one
            for ob in self.u_list:
                self._real_update(ob)
            self.u_list = []

    def _get_size(self):
        """Number of queue items to consume this cycle (1..lsize).

        An empty queue yields 1 so the worker blocks in queue.get() and
        wakes as soon as the next item arrives.
        """
        pending = self.queue.qsize()
        if pending > self.lsize:
            return self.lsize
        return pending if pending else 1

    def _run_single(self):
        """Worker loop: batch queued operations per collection and flush.

        Items are [collection, "insert"|"update", object] lists; the string
        "X" is the stop sentinel. A batch only mixes items targeting the
        same collection — a differing item is pushed back and the cycle
        ends so the batch can be flushed first.
        """
        while self.runable:
            self._run_last()
            size = self._get_size()
            for i in range(size):
                try:
                    item = self.queue.get(timeout=self.timeout)
                except Empty:
                    # idle for too long: shut the worker down
                    self.runable = False
                    break
                if isinstance(item, list):
                    collection, mark, ob = item
                    if i == 0:
                        # the first item of a cycle fixes the batch collection
                        self.asyn_collection = collection
                    if self.asyn_collection == collection:
                        if mark == "insert":
                            self.l_list.append(self._get_dict(ob))
                        elif mark == "update":
                            self.u_list.append(ob)
                    else:
                        # different collection: push the item back and flush
                        self.queue.put_left(item)
                        break
                elif isinstance(item, str) and item == "X":
                    # stop sentinel from close()
                    self.runable = False
                    break
                else:
                    # The original silently dropped non-"X" strings; report
                    # any unexpected message instead of losing it.
                    raise Exception("Error Queue message:\t" + str(item))

    def _real_insert_asyn(self, l_list):
        # bulk-insert a batch of plain dicts
        self.collection.insert_many(l_list)

    def _real_update(self, ob):
        """Apply the diff between ob and its `_origin` snapshot via
        a single $set/$unset update; no-op when nothing changed."""
        if not hasattr(ob, "_id"):
            raise Exception("not a normal mongo item")
        if not hasattr(ob, "_origin"):
            raise Exception("have no _origin data, can't use update_asyn callable")
        update_dict, remove_dict = self._get_update_and_remove_dict(ob)
        spec = dict()
        if update_dict:
            spec["$set"] = update_dict
        if remove_dict:
            spec["$unset"] = remove_dict
        if spec:
            self.collection.update_one({"_id": ob._id}, spec)