def pack(self, values_str, separator=',', has_tag_name=True):
    """Serialize a separator-delimited value string into header+body+footer bytes.

    values_str: e.g. "Tag1=v1,Tag2=v2" when has_tag_name, else "v1,v2".
    Returns the packed bytes (header, fixed-length body, checksum footer).
    """
    # Header is (message_type, body size in bytes).
    out = self.header_struct.pack(self.message_type, self.body_struct.size)
    tokens = values_str.split(separator)
    if len(tokens) != self.field_cnt:
        # TODO: return an error instead of logging and continuing (kept as-is).
        LogUtil.error("Error Format:" + str(self.message_type) + ":" + str(len(tokens)) + ":" + str(self.field_cnt))
    packed_values = []
    for idx in range(self.field_cnt):
        raw = tokens[idx].split('=')[1] if has_tag_name else tokens[idx]
        field_def = self.field_list[idx]
        if field_def[2] == 'N':
            # Numeric field.
            packed_values.append(int(raw))
        else:
            # 'C' and any unknown category: space-padded char field.
            packed_values.append(str.encode(raw.ljust(field_def[3])))
    out += self.body_struct.pack(*packed_values)
    # Checksum covers header + body.
    check_sum = calculate_check_sum(out, self.header_struct.size + self.body_struct.size)
    out += self.footer_struct.pack(check_sum)
    return out
def load_from_db(self, db_file, message_type_list):
    """Load message field definitions from SQLite and register one
    MessageProcessor per message type in self.message_processors.

    An empty message_type_list means: load every type found in the DB
    (the list is filled in place as a side effect, as before).
    """
    conn = sqlite3.connect(db_file)
    curs = conn.cursor()
    if not message_type_list:
        curs.execute('select distinct message_type from message_body_def order by message_type')
        message_type_list.extend(row[0] for row in curs.fetchall())
    query = '''select t.message_type,t.field_name,t.field_desc,t.format_string,t.type_category,t.type_len,t.ref_field,t.field_order,t.struct_tag
               from vw_message_body_def t
               where t.message_type=?
               order by t.field_order
            '''
    for message_type in message_type_list:
        fields = []
        curs.execute(query, [message_type])
        for order, row in enumerate(curs.fetchall()):
            LogUtil.info(row)
            # field_order is expected to be a gapless 0-based sequence.
            if row[7] != order:
                # TODO: raise instead of only logging.
                LogUtil.error("field list disorder:mesType=" + str(message_type) + " field:" + str(order + 1))
            # (field_name, format_string, type_category, type_len, ref_field, struct_tag)
            fields.append((row[1], row[3], row[4], row[5], row[6], row[8]))
        self.message_processors[message_type] = MessageProcessor(message_type, fields)
def replyFinished(self, reply):
    # Slot for a finished network download: write the reply payload to self.fileName.
    if (reply.error()):
        # Non-zero reply error code: log and skip writing.
        LogUtil.error(reply.errorString())
    else:
        file = QFile(self.fileName)
        if file.open(QIODevice.WriteOnly):
            file.write(reply.readAll())
            file.flush()
            file.close()
        # NOTE(review): a failed open() is silently ignored — confirm intended.
    # Schedule Qt-side deletion, then close the reply.
    reply.deleteLater()
    reply.close()
def unpack_fixedlen(self, buff, separator=',', write_tag_name=True):
    """Decode a fixed-length-body message into a Message.

    Builds message_str as separator-joined values (optionally "tag=value"),
    and captures ClOrdID / OrdStatus / OrdRejReason when present.
    Returns None on a message-type mismatch or a field-count mismatch.
    """
    message = Message()
    message.separator = separator
    message.has_tag_name = write_tag_name
    message.message_type = self.message_type
    message_type, body_len = self.header_struct.unpack_from(buff)
    if message_type != self.message_type:
        # TODO: proper format-error handling.
        LogUtil.error("Error message_type,expected=" + str(self.message_type) + ",act_type=" + str(message_type))
        return
    # TODO: check buff size
    # TODO: check checksum
    values = self.body_struct.unpack_from(buff[self.header_struct.size:])
    if len(values) != self.field_cnt:
        return
    parts = []
    consumed = 0
    for field_def, value in zip(self.field_list, values):
        if field_def[2] == 'N':
            text = str(value)
        else:
            # 'C' and unknown categories arrive as raw bytes.
            text = bytes.decode(value)
        consumed += field_def[3]
        parts.append(field_def[0] + '=' + text if write_tag_name else text)
        # TODO: add a column to message_body_def to flag the client-order-id field.
        if field_def[0] == 'ClOrdID':
            message.client_order_id = bytes.decode(value)
        elif field_def[0] == 'OrdStatus':
            message.order_status = bytes.decode(value)
        elif field_def[0] == 'OrdRejReason':
            message.order_reject_reason = (value)
    message.message_str = separator.join(parts)
    if consumed != body_len:
        LogUtil.error("bytes_process!=body_len,mesType=" + str(message_type) + ",body_len=" + str(body_len) + ",bytes_processed=" + str(consumed))
    return message
def tgw_recv(name, status, sock, resp_queue, config_file):
    # Trade-gateway receiver loop: read raw bytes from the socket and enqueue
    # them for downstream processing until the shared status flag becomes non-zero.
    LogUtil.get_instance(config_file, "tgw_recv")
    LogUtil.info("tgw_recv:"+name+" begin")
    while status.value==0:
        try:
            recv_data = sock.recv(1024)
            if not recv_data:
                # NOTE(review): an empty recv means the peer closed the connection;
                # this keeps looping and logging instead of breaking — confirm intended.
                LogUtil.error('Recv message error!')
            else:
                LogUtil.debug('tgw recv:'+binascii.hexlify(recv_data).decode())
                #to make the recv faster, do NOT process more, just put the message to the queue
                resp_queue.put(recv_data)
        finally:
            # No except clause: socket errors propagate and end this worker.
            pass
            #LogUtil.debug("")
    LogUtil.info("tgw_recv:"+name+" end")
def mdgw_recv(name, status, sock, resp_queue, config_file):
    # Market-data gateway receiver loop: read raw bytes and enqueue
    # (buf_id, src_time, raw_bytes) until the shared status flag becomes non-zero.
    LogUtil.get_instance(config_file, "mdgw_recv")
    LogUtil.info("mdgw_recv:"+name+" begin")
    buf_id = 0  # monotonically increasing id per received chunk
    while status.value == 0:
        try:
            recv_data = sock.recv(1024)
            if not recv_data:
                # NOTE(review): empty recv = peer closed; the loop keeps spinning
                # and logging instead of breaking — confirm intended.
                LogUtil.error('Recv message error!')
            else:
                buf_id = buf_id+1
                src_time = datetime.datetime.now()
                # to make the recv faster, do NOT process more, just put the message to the queue
                resp_queue.put((buf_id, src_time, recv_data))
                LogUtil.debug('mdgw recv:'+binascii.hexlify(recv_data).decode())
        finally:
            # No except clause: socket errors propagate and end this worker.
            pass
            # LogUtil.debug("")
    LogUtil.info("mdgw_recv:"+name+" end")
def unpack_nested(self, buff, separator=',', write_tag_name=True):
    """Decode a message whose body contains nested repeating groups and
    variable-length fields, driven by each field's struct_tag:

      DATA         - plain field
      NEXT_VAR_LEN - numeric field holding the byte length of a later VAR_LEN field
      VAR_LEN      - field whose length came from the NEXT_VAR_LEN field
      LOOP_CNT     - numeric field holding the repeat count of the following group
      LOOP_BEGIN / LOOP_END - group delimiters; ref_field holds the jump target
      BODY_END     - terminator

    Returns a Message (message_str is the separator-joined dump), or None on a
    message-type mismatch.
    """
    message=Message()
    message.separator=separator
    message.has_tag_name=write_tag_name
    message.message_type=self.message_type
    (message_type, body_len)=self.header_struct.unpack_from(buff)
    if message_type!=self.message_type:
        #TODO:error format check
        LogUtil.error("Error message_type,expected="+str(self.message_type)+",act_type="+str(message_type))
        return
    #TODO:check buff size
    #TODO:check checksum
    body_left_buff=buff[self.header_struct.size:]
    bytes_processed=0
    #body_tuple=self.body_struct.unpack_from(body_buff)
    str_value_list=[]
    rtn_str=''
    #field_list:
    #0:field_name;
    #1:format_str:
    #2:type,eg N,C;
    #3:type_len;
    #4:ref_field
    #5:struct_tag
    field_index=0
    # Loop state: loop_cnt = repeats requested, exec_cnt = repeats done so far.
    # Nested loops push/pop their state on loop_list.
    (loop_cnt_field, loop_cnt, exec_cnt)=(0, 1, 1)
    (var_len_indictor_field, var_len)=(0, 0)
    loop_list=[]
    while field_index<len(self.field_list) and self.field_list[field_index][5]!='BODY_END':
        # NOTE(review): `in('DATA', ...)` is a tuple membership test here, but the
        # `in('LOOP_BEGIN')` / `in('LOOP_END')` branches below test substring
        # containment of a single string — works only because tags match exactly;
        # confirm and prefer == or a tuple.
        if self.field_list[field_index][5] in('DATA', 'LOOP_CNT', 'NEXT_VAR_LEN', 'VAR_LEN'):
            if field_index>0:
                rtn_str=rtn_str+separator
            if write_tag_name:
                rtn_str=rtn_str+self.field_list[field_index][0]+'='
            if self.field_list[field_index][5]=='VAR_LEN':
                #self.field_list[field_index][4]-1 is the index of field which holding the length
                if self.field_list[field_index][4]!=var_len_indictor_field:
                    LogUtil.error("Error getting VAR_LEN:field_index="+str(field_index)+" indict field="+str(var_len_indictor_field))
                # Length was captured from the preceding NEXT_VAR_LEN field.
                format_str="!"+str(var_len)+"s"
                bytes_len=var_len
                LogUtil.debug("field:"+self.field_list[field_index][0]+" format:"+format_str)
            else:
                format_str="!"+self.field_list[field_index][1]
                bytes_len=self.field_list[field_index][3]
                LogUtil.debug("field:"+self.field_list[field_index][0]+" format:"+format_str)
            field_value=struct.unpack_from(format_str, body_left_buff)
            if self.field_list[field_index][5]=='NEXT_VAR_LEN':
                # Remember the length value for the upcoming VAR_LEN field.
                var_len=field_value[0]
                var_len_indictor_field=field_index
            if self.field_list[field_index][2]=='C':
                field_str_value=bytes.decode(field_value[0])
            elif self.field_list[field_index][2]=='N':
                field_str_value=str(field_value[0])
            else:
                # Unknown category: decoded as bytes but flagged.
                field_str_value=bytes.decode(field_value[0])
                LogUtil.error("unknown type category:"+self.field_list[field_index][2])
            str_value_list.append(field_str_value)
            rtn_str=rtn_str+field_str_value
            #TODO add a column to message_body_def to indicate whether the column is client Order ID
            if self.field_list[field_index][0]=='ClOrdID':
                message.client_order_id=field_str_value
            elif self.field_list[field_index][0]=='OrdStatus':
                message.order_status=field_str_value
            elif self.field_list[field_index][0]=='OrdRejReason':
                message.order_reject_reason=(field_value[0])
            bytes_processed=bytes_processed+bytes_len
            # Consume the bytes of this field.
            body_left_buff=body_left_buff[bytes_len:]
            if self.field_list[field_index][5]=='LOOP_CNT':
                # Entering a (possibly nested) group: save current loop state.
                loop_list.append((field_index, loop_cnt, exec_cnt))
                LogUtil.debug("push:field_index="+str(field_index)+ ",loop="+str(loop_cnt)+ ",exec="+str(exec_cnt))
                loop_cnt=field_value[0]
                exec_cnt=0
            field_index=field_index+1
        elif self.field_list[field_index][5] in('LOOP_BEGIN'):
            if exec_cnt<loop_cnt:
                # Run (another) iteration of the group body.
                exec_cnt=exec_cnt+1
                field_index=field_index+1
            else:
                #end loop, move index to LOOP_END+1
                (loop_cnt_field, loop_cnt, exec_cnt)=loop_list.pop()
                LogUtil.debug("pop:field_index="+str(loop_cnt_field)+",loop="+str(loop_cnt)+ ",exec="+str(exec_cnt))
                field_index=self.field_list[field_index][4]
        elif self.field_list[field_index][5] in('LOOP_END'):
            #goto LOOP BEGIN(which is saved in ref_field)
            field_index=self.field_list[field_index][4]
            if exec_cnt>loop_cnt:
                LogUtil.critical("LOOP_END error: exec_cnt="+str(exec_cnt)+",loop_cnt="+str(loop_cnt))
        else:
            #TODO:raiseExceptions
            LogUtil.error("unsupported struct tag:"+self.field_list[field_index][5])
            break
    message.message_str=rtn_str
    if bytes_processed!=body_len:
        LogUtil.error("bytes_process!=body_len,mesType="+str(message_type)+",body_len="+str(body_len)+",bytes_processed="+str(bytes_processed))
    return message
def unpack_varlen(self, buff, separator=',', write_tag_name=True):
    """Decode a message whose body may contain variable-length fields.

    A field whose ref (field_list[i][4]) is > 0 takes its byte length from the
    already-decoded value of the earlier field at that index. Returns a Message,
    or None on a message-type mismatch.
    """
    message=Message()
    message.separator=separator
    message.has_tag_name=write_tag_name
    message.message_type=self.message_type
    (message_type, body_len)=self.header_struct.unpack_from(buff)
    if message_type!=self.message_type:
        #TODO:error format check
        LogUtil.error("Error message_type,expected="+str(self.message_type)+",act_type="+str(message_type))
        return
    #TODO:check buff size
    #TODO:check checksum
    bytes_processed=0
    body_left_buff=buff[self.header_struct.size:]
    #body_tuple=self.body_struct.unpack_from(body_buff)
    str_value_list=[]
    rtn_str=''
    #field_list:
    #0:field_name;
    #1:format_str:
    #2:type,eg N,C;
    #3:type_len;
    #4:ref_var_len_field
    for field_index in range(len(self.field_list)):
        if field_index>0:
            rtn_str=rtn_str+separator
        if write_tag_name:
            rtn_str=rtn_str+self.field_list[field_index][0]+'='
        if self.field_list[field_index][4]>0:
            #self.field_list[field_index][4] is the index of field which holding the length
            format_str="!"+str_value_list[self.field_list[field_index][4]]+"s"
            bytes_len=int(str_value_list[self.field_list[field_index][4]])
            LogUtil.debug("field:"+self.field_list[field_index][0]+" format:"+format_str)
        else:
            format_str="!"+self.field_list[field_index][1]
            bytes_len=self.field_list[field_index][3]
            LogUtil.debug("field:"+self.field_list[field_index][0]+" format:"+format_str)
        field_value=struct.unpack_from(format_str, body_left_buff)
        if self.field_list[field_index][2]=='C':
            field_str_value=bytes.decode(field_value[0])
        elif self.field_list[field_index][2]=='N':
            field_str_value=str(field_value[0])
        else:
            # Unknown category: fall back to raw-bytes decoding.
            field_str_value=bytes.decode(field_value[0])
        str_value_list.append(field_str_value)
        rtn_str=rtn_str+field_str_value
        #TODO add a column to message_body_def to indicate whether the column is client Order ID
        if self.field_list[field_index][0]=='ClOrdID':
            message.client_order_id=field_str_value
        elif self.field_list[field_index][0]=='OrdStatus':
            message.order_status=field_str_value
        elif self.field_list[field_index][0]=='OrdRejReason':
            # NOTE(review): stores the whole unpack tuple; the other unpackers
            # store field_value[0] — confirm which is intended.
            message.order_reject_reason=field_value
        bytes_processed=bytes_processed+bytes_len
        # Consume this field's bytes.
        body_left_buff=body_left_buff[bytes_len:]
    message.message_str=rtn_str
    if bytes_processed!=body_len:
        LogUtil.error("bytes_process!=body_len,mesType="+str(message_type)+",body_len="+str(body_len)+",bytes_processed="+str(bytes_processed))
    return message
def run(self):
    # Worker thread: subscribe to raw market-data buffers over ZeroMQ and
    # optionally persist each into market_data_buff; pushes counters to the UI
    # via the bufStatusUpdated signal each iteration.
    self.dialog.pushButtonBufRecvStart.setEnabled(False)
    self.dialog.pushButtonBufRecvStop.setEnabled(True)
    LogUtil.getLoggerInstance(self.config, "buf_recv")
    LogUtil.info("buf_recv:"+" begin")
    config=configparser.ConfigParser()
    config.read(self.config)
    # NOTE(review): config.get returns a string, so any non-empty value
    # (including "False" or "0") enables writing — confirm intended.
    write_buff=config.get("buf_recv", "write_buff")
    if write_buff:
        md_host=config.get("buf_recv", "md_host")
        md_database=config.get("buf_recv", "md_database")
        md_user=config.get("buf_recv", "md_user")
        md_password=config.get("buf_recv", "md_password")
        md_conn=pgdb.connect(database=md_database, host=md_host, user=md_user, password=md_password)
        md_insert_buf_cursor=md_conn.cursor()
        #md_insert_buf_sql='''insert into market_data_buff(data_date,insert_time,buff)
        #values(%(data_date)s,localtimestamp,%(buff)s)
        #'''
        md_insert_buf_sql='''insert into market_data_buff(data_date,buff_id,insert_time,buff,src_time)
        values(%(data_date)s,%(buf_id)s,%(insert_time)s,%(buff)s,%(src_time)s)
        '''
        # Parameter dict is reused across iterations and refilled per buffer.
        md_insert_buf_dict={'data_date':0, 'buf_id':0, 'insert_time':None, 'buff':'', 'src_time':None}
    buf_sub_addr=config.get("buf_recv", "buf_sub_addr")
    buf_sub_topic=config.get("buf_recv", "buf_sub_topic")
    ctx=zmq.Context()
    sock=ctx.socket(zmq.SUB)
    sock.connect(buf_sub_addr)
    sock.setsockopt_string(zmq.SUBSCRIBE, buf_sub_topic)
    bufRecvCnt=0
    bufWriteCnt=0
    bufUpdateTime=None
    bufErrCnt=0
    bufStatus='Running'
    recvBufStatus=RecvBufStatus()
    while not self.toStop:
        try:
            (buf_id, src_time, recv_buff)=sock.recv_pyobj()
            bufRecvCnt=bufRecvCnt+1
            bufUpdateTime=datetime.datetime.now().strftime('%H:%M:%S.%f')
            if write_buff:
                md_insert_buf_dict['data_date']=0
                md_insert_buf_dict['insert_time']=datetime.datetime.now()
                md_insert_buf_dict['buf_id']=buf_id
                # Raw buffer is stored hex-encoded.
                md_insert_buf_dict['buff']=binascii.hexlify(recv_buff).decode()
                #msg=recv_buff.decode()
                md_insert_buf_dict['src_time']=src_time
                md_insert_buf_cursor.execute(md_insert_buf_sql, md_insert_buf_dict)
                #TODO
                md_conn.commit()
                bufWriteCnt=bufWriteCnt+1
                #self.dialog.bufWriteCnt.setText(str(bufWriteCnt))
        except Exception as e:
            bufErrCnt=bufErrCnt+1
            #self.dialog.bufErrCnt.setText(str(bufErrCnt))
            LogUtil.error(e)
            LogUtil.error("BufRecvErr:bufErrCnt="+str(bufErrCnt)+",bufRecvCnt="+str(bufRecvCnt)+",bufWriteCnt="+str(bufWriteCnt))
        finally:
            pass
        #TODO
        # Publish current counters to the UI every iteration.
        recvBufStatus.bufRecvCnt=str(bufRecvCnt)
        recvBufStatus.bufWriteCnt=str(bufWriteCnt)
        recvBufStatus.bufErrCnt=str(bufErrCnt)
        recvBufStatus.bufUpdateTime=bufUpdateTime
        recvBufStatus.bufStatus=bufStatus
        self.bufStatusUpdated.emit(recvBufStatus)
    LogUtil.debug("BufRecvErr:bufErrCnt="+str(bufErrCnt)+",bufRecvCnt="+str(bufRecvCnt)+",bufWriteCnt="+str(bufWriteCnt))
    self.dialog.pushButtonBufRecvStart.setEnabled(True)
    self.dialog.pushButtonBufRecvStop.setEnabled(False)
def run(self):
    # Worker thread: subscribe to decoded market-data messages over ZeroMQ and
    # optionally persist each into market_data_message; pushes counters to the
    # UI via the msgStatusUpdated signal each iteration.
    self.dialog.pushButtonMsgRecvStart.setEnabled(False)
    self.dialog.pushButtonMsgRecvStop.setEnabled(True)
    LogUtil.getLoggerInstance(self.config, "msg_recv")
    LogUtil.info("msg_recv:"+" begin")
    config=configparser.ConfigParser()
    config.read(self.config)
    # NOTE(review): config.get returns a string, so any non-empty value
    # (including "False" or "0") enables writing — confirm intended.
    write_msg=config.get("msg_recv", "write_msg")
    if write_msg:
        md_host=config.get("msg_recv", "md_host")
        md_database=config.get("msg_recv", "md_database")
        md_user=config.get("msg_recv", "md_user")
        md_password=config.get("msg_recv", "md_password")
        md_conn=pgdb.connect(database=md_database, host=md_host, user=md_user, password=md_password)
        md_insert_msg_cursor=md_conn.cursor()
        md_insert_msg_sql='''insert into market_data_message(data_date,insert_time,message_type,message_id,message_content,src_time)
        values(%(data_date)s,%(insert_time)s,%(message_type)s,%(message_id)s,%(message_content)s,%(src_time)s)
        '''
        # Remaining keys (insert_time, message_id, src_time) are filled per message below.
        md_insert_msg_dict={'data_date':0, 'message_type':0, 'message_content':''}
    msg_sub_addr=config.get("msg_recv", "msg_sub_addr")
    msg_sub_topic=config.get("msg_recv", "msg_sub_topic")
    ctx=zmq.Context()
    sock=ctx.socket(zmq.SUB)
    sock.connect(msg_sub_addr)
    sock.setsockopt_string(zmq.SUBSCRIBE, msg_sub_topic)
    msgRecvCnt=0
    msgWriteCnt=0
    msgUpdateTime=None
    msgErrCnt=0
    recvMsgStatus=RecvMsgStatus()
    msgStatus='Running'
    while not self.toStop:
        try:
            (message_type, message_id, src_time, recv_msg)=sock.recv_pyobj()
            msgRecvCnt=msgRecvCnt+1
            msgUpdateTime=datetime.datetime.now().strftime('%H:%M:%S.%f')
            if write_msg:
                md_insert_msg_dict['data_date']=0
                md_insert_msg_dict['insert_time']=datetime.datetime.now()
                md_insert_msg_dict['message_type']=message_type
                md_insert_msg_dict['message_id']=message_id
                md_insert_msg_dict['message_content']=recv_msg
                md_insert_msg_dict['src_time']=src_time
                md_insert_msg_cursor.execute(md_insert_msg_sql, md_insert_msg_dict)
                #TODO
                md_conn.commit()
                msgWriteCnt=msgWriteCnt+1
        except Exception as e:
            msgErrCnt=msgErrCnt+1
            LogUtil.error(e)
            LogUtil.error("MsgRecvErr:msgErrCnt="+str(msgErrCnt)+",msgRecvCnt="+str(msgRecvCnt)+",msgWriteCnt="+str(msgWriteCnt))
        finally:
            pass
        # Publish current counters to the UI every iteration.
        recvMsgStatus.msgRecvCnt=str(msgRecvCnt)
        recvMsgStatus.msgWriteCnt=str(msgWriteCnt)
        recvMsgStatus.msgErrCnt=str(msgErrCnt)
        recvMsgStatus.msgUpdateTime=msgUpdateTime
        recvMsgStatus.msgStatus=msgStatus
        self.msgStatusUpdated.emit(recvMsgStatus)
    LogUtil.debug("MsgRecvErr:msgErrCnt="+str(msgErrCnt)+",msgRecvCnt="+str(msgRecvCnt)+",msgWriteCnt="+str(msgWriteCnt))
    self.dialog.pushButtonMsgRecvStart.setEnabled(True)
    self.dialog.pushButtonMsgRecvStop.setEnabled(False)
class LabelUtil(Singleton):
    """Character <-> numeric label mapping loaded from a unicode set file.

    Bug fix: convert_bi_graphemes_to_num called char.decode("utf-8") on every
    item; in Python 3, str has no .decode, so the method always raised
    AttributeError. It now decodes only when the item is actually bytes,
    which is backward compatible with bytes input.
    """
    _log = None
    # dataPath

    def __init__(self):
        self._log = LogUtil().getlogger()
        self._log.debug("LabelUtil init")

    def load_unicode_set(self, unicodeFilePath):
        """Load "char,index" lines into byChar / byIndex / byList and set count."""
        self.byChar = {}
        self.byIndex = {}
        self.byList = []
        self.unicodeFilePath = unicodeFilePath
        with open(unicodeFilePath, 'rt', encoding='UTF-8') as data_file:
            self.count = 0
            for i, r in enumerate(data_file):
                # rsplit once from the right so a literal ',' char can be mapped.
                ch, inx = r.rsplit(",", 1)
                self.byChar[ch] = int(inx)
                self.byIndex[int(inx)] = ch
                self.byList.append(ch)
                self.count += 1

    def to_unicode(self, src, index):
        """Return (code point at src[index], index + 1)."""
        # 1 byte
        code1 = int(ord(src[index + 0]))
        index += 1
        result = code1
        return result, index

    def convert_word_to_grapheme(self, label):
        """Split label into single-character graphemes; returns (list, joined string)."""
        result = []
        index = 0
        while index < len(label):
            (code, nextIndex) = self.to_unicode(label, index)
            result.append(label[index])
            index = nextIndex
        return result, "".join(result)

    def convert_word_to_num(self, word):
        """Map each grapheme of word to its numeric label (width-folded via strQ2B)."""
        try:
            label_list, _ = self.convert_word_to_grapheme(word)
            label_num = []
            for char in label_list:
                # skip empty entries
                if char == "":
                    pass
                else:
                    label_num.append(int(self.byChar[strQ2B(char)]))
            # tuple typecast: read only, faster
            return tuple(label_num)
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)
        except KeyError as err:
            self._log.error("unicodeSet Key not found: %s" % err)
            exit(-1)

    def convert_bi_graphemes_to_num(self, word):
        """Map each element of word to its numeric label; accepts str or bytes items."""
        label_num = []
        for char in word:
            # skip empty entries
            if char == "":
                pass
            else:
                # Fix: str has no .decode in Python 3 — only decode bytes items.
                if isinstance(char, bytes):
                    char = char.decode("utf-8")
                label_num.append(int(self.byChar[strQ2B(char)]))
        # tuple typecast: read only, faster
        return tuple(label_num)

    def convert_num_to_word(self, num_list):
        """Map numeric labels back to characters, joined with spaces."""
        try:
            label_list = []
            for num in num_list:
                label_list.append(self.byIndex[num])
            return ' '.join(label_list)
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)
        except KeyError as err:
            self._log.error("unicodeSet Key not found: %s" % err)
            exit(-1)

    def get_count(self):
        """Number of loaded entries; exits if the set is not loaded."""
        try:
            return self.count
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)

    def get_unicode_file_path(self):
        """Path the unicode set was loaded from; exits if not loaded."""
        try:
            return self.unicodeFilePath
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)

    def get_blank_index(self):
        return self.byChar["-"]

    def get_space_index(self):
        return self.byChar["$"]
class LabelUtil:
    """Bidirectional mapping between characters and numeric labels, loaded
    from a two-column CSV file of (character, index) rows."""
    _log = None
    # dataPath

    def __init__(self):
        self._log = LogUtil().getlogger()
        self._log.debug("LabelUtil init")

    def load_unicode_set(self, unicodeFilePath):
        """Populate byChar / byIndex / count from the CSV at unicodeFilePath."""
        self.byChar = {}
        self.byIndex = {}
        self.unicodeFilePath = unicodeFilePath
        with open(unicodeFilePath) as data_file:
            reader = csv.reader(data_file, delimiter=',')
            self.count = 0
            for row in reader:
                code = int(row[1])
                self.byChar[row[0]] = code
                self.byIndex[code] = row[0]
                self.count += 1

    def to_unicode(self, src, index):
        """Return (code point of src[index], index + 1) — always advances by one."""
        return ord(src[index]), index + 1

    def convert_word_to_grapheme(self, label):
        """Split label into single characters; returns (char list, joined string)."""
        graphemes = []
        pos = 0
        while pos < len(label):
            _, next_pos = self.to_unicode(label, pos)
            graphemes.append(label[pos])
            pos = next_pos
        return graphemes, "".join(graphemes)

    def convert_word_to_num(self, word):
        """Map each character of word to its numeric label; returns a tuple."""
        try:
            graphemes, _ = self.convert_word_to_grapheme(word)
            # Empty entries are skipped; tuple is read-only and faster.
            return tuple(int(self.byChar[ch]) for ch in graphemes if ch != "")
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)
        except KeyError as err:
            self._log.error("unicodeSet Key not found: %s" % err)
            exit(-1)

    def convert_bi_graphemes_to_num(self, word):
        """Like convert_word_to_num but iterates word directly (no grapheme split)."""
        # Empty entries are skipped; tuple is read-only and faster.
        return tuple(int(self.byChar[ch]) for ch in word if ch != "")

    def convert_num_to_word(self, num_list):
        """Map numeric labels back to characters and concatenate them."""
        try:
            return ''.join(self.byIndex[num] for num in num_list)
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)
        except KeyError as err:
            self._log.error("unicodeSet Key not found: %s" % err)
            exit(-1)

    def get_count(self):
        """Number of loaded entries; exits if the set is not loaded."""
        try:
            return self.count
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)

    def get_unicode_file_path(self):
        """Path the unicode set was loaded from; exits if not loaded."""
        try:
            return self.unicodeFilePath
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)

    def get_blank_index(self):
        return self.byChar["-"]

    def get_space_index(self):
        return self.byChar["$"]
def db_writer(name, status, resp_queue, config_file):
    # Consume raw TGW response bytes from resp_queue, reassemble complete
    # messages (header+body+footer), decode them, and update the matching
    # req_resp row, until the shared status flag becomes non-zero.
    LogUtil.get_instance(config_file, "db_writer")
    LogUtil.info("db_writer:"+name+" begin")
    config=configparser.ConfigParser()
    config.read(config_file)
    factory=msg.MessageProcessorFactory()
    db_file=config.get("message_config", "db_file")
    factory.load_from_db(db_file, [])
    # Clamp the queue read timeout to (0, 60] seconds.
    read_timeout=config.getint("db_writer", "resp_queue_timeout")
    LogUtil.info("read_timeout:"+str(read_timeout))
    if read_timeout<=0 or read_timeout>60:
        read_timeout=60
        LogUtil.info("read_timeout changed to:"+str(read_timeout))
    host=config.get("reqresp", "host")
    database=config.get("reqresp", "database")
    user=config.get("reqresp","user")
    password=config.get("reqresp", "password")
    conn=pgdb.connect(database=database, host=host, user=user, password=password)
    update_curs=conn.cursor()
    update_resp="""UPDATE req_resp
       SET rept_status=%(rept_status)s,ex_order_status=%(ex_order_status)s,
           err_code=%(err_code)s,
           resp_text=%(resp_text)s,
           resp_time=localtimestamp
     WHERE oms_order_id=%(oms_order_id)s
    """
    # Parameter dict is reused and refilled per decoded message.
    update_dict={'rept_status':'', 'ex_order_status':'', 'err_code':'', 'resp_text':'', 'oms_order_id':''}
    left_buff=b''
    while status.value==0:
        #TODO:refactor,try recv; and then process the buff
        #TODO:when processing buff, abstract the condition to next_message_ready()
        try:
            recv_buff=resp_queue.get(block=True, timeout=read_timeout)
            left_buff=left_buff+recv_buff
            # NOTE(review): header_len+header_len looks like it should be
            # header_len+footer_len — confirm against the wire format.
            if len(left_buff)<Message.header_len+Message.header_len:
                continue
            (message_type, body_len)=msg.get_message_header(left_buff)
            next_message_len=body_len+ Message.header_len+Message.footer_len
            # Drain every complete message currently in the buffer.
            while next_message_len<=len(left_buff):
                try:
                    message_processor=factory.build_message_processor(message_type)
                    message=message_processor.unpack(left_buff)
                    LogUtil.debug("message:"+message.toString())
                    if True:#TODO placeholder, check if was order execution report
                        update_dict['rept_status']='4'
                        update_dict['ex_order_status']=message.order_status
                        update_dict['err_code']=message.order_reject_reason
                        update_dict['resp_text']=message.message_str
                        update_dict['oms_order_id']=message.client_order_id
                        update_curs.execute(update_resp, update_dict)
                        if update_curs.rowcount!=1:
                            #TODO error handle, rollback?
                            LogUtil.error("no data update"+message.toString())
                        else:
                            conn.commit()
                except KeyError:
                    LogUtil.error("unkown message type:"+str(message_type))
                # Drop the consumed message and peek at the next header, if any.
                left_buff=left_buff[next_message_len:]
                if len(left_buff)<Message.header_len+Message.footer_len:
                    break
                else:
                    (message_type, body_len)=msg.get_message_header(left_buff)
                    next_message_len=body_len+ Message.header_len+Message.footer_len
        except queue.Empty:
            LogUtil.debug("resp_queue no data")
        # except KeyError:
        #     LogUtil.error("unkown message type:"+str(message_type))
        else:
            # Runs only when the try body completed without exception.
            # NOTE(review): 'message' is unbound here if the inner while never
            # ran (buffer held no complete message) — potential NameError; confirm.
            LogUtil.info("db_writer finished processing:"+message.toString())
    LogUtil.info("db_writer:"+name+" end")
def main():
    # Entry point: build and send a logon message to the trade gateway (TGW),
    # then on confirmation spawn DB reader/writer and socket send/recv worker
    # processes until 'q' is entered; finally send a logout message.
    if len(sys.argv)<2:
        print("Usage: tgw.py config_file")
        sys.exit(0)
    #read mdgw connection config
    config_file=sys.argv[1]
    run_status=multiprocessing.Value('i', 0)  # 0: running; 1: exit
    message_header_struct = struct.Struct('!II')
    logon_struct = struct.Struct('!20s20sI16s32s')
    message_footer_struct = struct.Struct('!I')
    send_buff = ctypes.create_string_buffer(message_header_struct.size+logon_struct.size+message_footer_struct.size)
    bodyLength = logon_struct.size
    # Header: (message type, body length); presumably type 1 = logon — confirm.
    message_header =(1, bodyLength)
    message_header_struct.pack_into(send_buff, 0, *message_header)
    LogUtil.get_instance(config_file, "log")
    LogUtil.info("Begin")
    config=configparser.ConfigParser()
    config.read(config_file)
    sender_comp=config.get("tgw","sender_comp")
    target_comp=config.get("tgw","target_comp")
    password=config.get("tgw","password")
    app_ver_id=config.get("tgw","app_ver_id")
    # Fixed-width fields are space-padded to the struct sizes above.
    sender_comp = str.encode(sender_comp.ljust(20))
    target_comp = str.encode(target_comp.ljust(20))
    password = str.encode(password.ljust(16))
    app_ver_id = str.encode(app_ver_id.ljust(32))
    # NOTE(review): the literal 30 is presumably a heartbeat interval — confirm.
    logon_body = (sender_comp, target_comp, 30, password, app_ver_id)
    logon_struct.pack_into(send_buff, message_header_struct.size, *logon_body)
    check_sum = msg.calculate_check_sum(send_buff, message_header_struct.size+logon_struct.size)
    message_footer_struct.pack_into(send_buff, message_header_struct.size+logon_struct.size, check_sum)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_ip=config.get("tgw", "ip")
    server_port=config.getint("tgw", "port")
    #logger initialize
    server_address = (server_ip, server_port)
    sock.connect(server_address)
    sock.settimeout(5)
    # NOTE(review): setblocking(True) resets the socket to blocking mode with
    # no timeout, overriding the settimeout(5) above — confirm intended.
    sock.setblocking(True)
    try:
        LogUtil.debug(binascii.hexlify(send_buff))
        sock.sendall(send_buff)
        recv_data = sock.recv(1024)
        if not recv_data:
            LogUtil.error('Recv error')
        else:
            LogUtil.info('Recv OK')
            LogUtil.info(binascii.hexlify(recv_data))
            unpack_recv_data = message_header_struct.unpack_from(recv_data)
            LogUtil.info(unpack_recv_data)
            #print(binascii.hexlify(recv_data))
            if unpack_recv_data[0]==1:
                LogUtil.info('Receive Login Confirm!')
                #TODO:send report sync
                factory=msg.MessageProcessorFactory()
                db_file=config.get("message_config", "db_file")
                factory.load_from_db(db_file, [])
                message_processor=factory.build_message_processor(5)
                buff=message_processor.pack("ReportIndex=1")
                sock.sendall(buff)
                # Spin up worker processes sharing the socket, queues and run flag.
                req_queue=multiprocessing.Queue()
                resp_queue=multiprocessing.Queue()
                dbreader_proc=multiprocessing.Process(target=db_reader, args=('DBReader', run_status, req_queue, config_file))
                dbwriter_proc=multiprocessing.Process(target=db_writer, args=('DBWriter', run_status, resp_queue, config_file))
                send_proc=multiprocessing.Process(target=tgw_send, args=('TGW sender', run_status, sock, req_queue, config_file))
                recv_proc=multiprocessing.Process(target=tgw_recv, args=('TGW receiver', run_status, sock, resp_queue, config_file))
                dbreader_proc.start()
                dbwriter_proc.start()
                send_proc.start()
                recv_proc.start()
                time.sleep(10)
                # Poll stdin until the user enters 'q'.
                cmd=input("enter command:")
                while cmd!='q':
                    time.sleep(2)
                    cmd=input("enter command:")
                LogUtil.warning("sending exit cmd")
                run_status.value=1
                dbreader_proc.join()
                dbwriter_proc.join()
                send_proc.join()
                recv_proc.join()
                # Send the logout message and handle the response.
                logout_message=msg.packLogoutMessage()
                sock.sendall(logout_message)
                recv_data = sock.recv(1024)
                if not recv_data:
                    LogUtil.error('Recv logout_message error!')
                else:
                    LogUtil.info('Recv logout_message OK')
                    LogUtil.debug(binascii.hexlify(recv_data))
    finally:
        sock.close()
    LogUtil.info ('End')
def md_responsor(name, status, resp_queue, config_file):
    """Drain raw market-data buffers from resp_queue, optionally republish the
    raw buffers and the decoded messages on ZeroMQ PUB sockets, until the
    shared status flag becomes non-zero.

    Bug fix: the generic exception handler concatenated
    traceback.print_exc() (which prints and returns None) into the log
    string, making the handler itself raise TypeError; it now logs
    traceback.format_exc(), which returns the traceback as a string.
    """
    LogUtil.get_instance(config_file, "md_responsor")
    LogUtil.info("md_responsor:"+name+" begin")
    config = configparser.ConfigParser()
    config.read(config_file)
    factory = msg.MessageProcessorFactory()
    db_file = config.get("message_config", "db_file")
    factory.load_from_db(db_file, [])
    # Clamp the queue read timeout to (0, 60] seconds.
    read_timeout = config.getint("md_responsor", "resp_queue_timeout")
    LogUtil.info("read_timeout:"+str(read_timeout))
    if read_timeout <= 0 or read_timeout > 60:
        read_timeout = 60
        LogUtil.info("read_timeout changed to:"+str(read_timeout))
    # NOTE(review): config.get returns strings, so any non-empty value (even
    # "False") enables publishing — confirm intended.
    pub_buf = config.get("md_responsor", "pub_buf")
    pub_msg = config.get("md_responsor", "pub_msg")
    pub_buf_addr = config.get("md_responsor", "pub_buf_addr")
    pub_msg_addr = config.get("md_responsor", "pub_msg_addr")
    LogUtil.debug("pub_buf:"+pub_buf+",pub_buf_addr:"+pub_buf_addr)
    LogUtil.debug("pub_msg:"+pub_msg+",pub_msg_addr:"+pub_msg_addr)
    if pub_buf:
        buf_ctx = zmq.Context()
        buf_sock = buf_ctx.socket(zmq.PUB)
        buf_sock.bind(pub_buf_addr)
    if pub_msg:
        msg_ctx = zmq.Context()
        msg_sock = msg_ctx.socket(zmq.PUB)
        msg_sock.bind(pub_msg_addr)
    left_buff = b''
    message_id = 0
    while status.value == 0:
        # TODO:refactor,try recv; and then process the buff
        # TODO:when processing buff, abstract the condition to next_message_ready()
        try:
            (buf_id, src_time, recv_buff) = resp_queue.get(block=True, timeout=read_timeout)
            if pub_buf:
                # TODO:topic?
                buf_sock.send_pyobj((buf_id, src_time, recv_buff))
            left_buff = left_buff+recv_buff
            # NOTE(review): header_len+header_len looks like it should be
            # header_len+footer_len — confirm against the wire format.
            if len(left_buff) < Message.header_len+Message.header_len:
                continue
            (message_type, body_len) = msg.get_message_header(left_buff)
            next_message_len = body_len + Message.header_len+Message.footer_len
            # Drain every complete message currently in the buffer.
            while next_message_len <= len(left_buff):
                try:
                    message_processor = factory.build_message_processor(message_type)
                    message = message_processor.unpack(left_buff)
                    message_id = message_id+1
                    LogUtil.debug("message:"+message.toString())
                    if pub_msg:
                        # TODO:topic?
                        src_time = datetime.datetime.now()
                        msg_sock.send_pyobj((message_type, message_id, src_time, message.message_str))
                except KeyError:
                    LogUtil.error("unkown message type:"+str(message_type))
                except Exception as e:
                    LogUtil.error(e)
                    # Fix: format_exc() returns the traceback as a string;
                    # print_exc() returned None and broke this concatenation.
                    LogUtil.error("other error:"+traceback.format_exc())
                # Drop the consumed message and peek at the next header, if any.
                left_buff = left_buff[next_message_len:]
                if len(left_buff) < Message.header_len+Message.footer_len:
                    break
                else:
                    (message_type, body_len) = msg.get_message_header(left_buff)
                    next_message_len = body_len + Message.header_len+Message.footer_len
        except queue.Empty:
            LogUtil.debug("resp_queue no data")
        # except KeyError:
        #     LogUtil.error("unkown message type:"+str(message_type))
        else:
            pass
    LogUtil.info("md_responsor:"+name+" end")
class LabelUtil:
    # Bidirectional mapping between characters and numeric labels, loaded
    # from a two-column CSV file of (character, index) rows.
    _log = None
    # dataPath

    def __init__(self):
        self._log = LogUtil().getlogger()
        self._log.debug("LabelUtil init")

    def load_unicode_set(self, unicodeFilePath):
        # Populate byChar / byIndex / count from the CSV at unicodeFilePath.
        self.byChar = {}
        self.byIndex = {}
        self.unicodeFilePath = unicodeFilePath
        with open(unicodeFilePath) as data_file:
            data_file = csv.reader(data_file, delimiter=',')
            self.count = 0
            for r in data_file:
                self.byChar[r[0]] = int(r[1])
                self.byIndex[int(r[1])] = r[0]
                self.count += 1

    def to_unicode(self, src, index):
        # Return (code point of src[index], index + 1) — always advances by one.
        # 1 byte
        code1 = int(ord(src[index + 0]))
        index += 1
        result = code1
        return result, index

    def convert_word_to_grapheme(self, label):
        # Split label into single characters; returns (char list, joined string).
        result = []
        index = 0
        while index < len(label):
            (code, nextIndex) = self.to_unicode(label, index)
            result.append(label[index])
            index = nextIndex
        return result, "".join(result)

    def convert_word_to_num(self, word):
        # Map each character of word to its numeric label; returns a tuple.
        try:
            label_list, _ = self.convert_word_to_grapheme(word)
            label_num = []
            for char in label_list:
                # skip word
                if char == "":
                    pass
                else:
                    label_num.append(int(self.byChar[char]))
            # tuple typecast: read only, faster
            return tuple(label_num)
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)
        except KeyError as err:
            self._log.error("unicodeSet Key not found: %s" % err)
            exit(-1)

    def convert_bi_graphemes_to_num(self, word):
        # Like convert_word_to_num but iterates word directly (no grapheme split).
        label_num = []
        for char in word:
            # skip word
            if char == "":
                pass
            else:
                label_num.append(int(self.byChar[char]))
        # tuple typecast: read only, faster
        return tuple(label_num)

    def convert_num_to_word(self, num_list):
        # Map numeric labels back to characters and concatenate them.
        try:
            label_list = []
            for num in num_list:
                label_list.append(self.byIndex[num])
            return ''.join(label_list)
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)
        except KeyError as err:
            self._log.error("unicodeSet Key not found: %s" % err)
            exit(-1)

    def get_count(self):
        # Number of loaded entries; exits if the set is not loaded.
        try:
            return self.count
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)

    def get_unicode_file_path(self):
        # Path the unicode set was loaded from; exits if not loaded.
        try:
            return self.unicodeFilePath
        except AttributeError:
            self._log.error("unicodeSet is not loaded")
            exit(-1)

    def get_blank_index(self):
        return self.byChar["-"]

    def get_space_index(self):
        return self.byChar["$"]
class WebForm(QtWebEngineWidgets.QWebEngineView):
    # NOTE(review): this signal shadows QWidget's closeEvent handler method of
    # the same name — confirm intended.
    closeEvent = pyqtSignal(QCloseEvent)

    @pyqtSlot()
    def finish(self):
        # Slot callable from the web channel: emit the close signal.
        self.closeEvent.emit(QCloseEvent())


if __name__ == "__main__":
    # Stand-alone QWebChannel server: expose `dialog` over a websocket on
    # port 12345 and start the OM server client.
    from sys import argv, exit
    a = QApplication(argv)
    server = QWebSocketServer(
        "QWebChannel Standalone Server", QWebSocketServer.NonSecureMode
    )
    if not server.listen(QHostAddress.LocalHost, 12345):
        log.error("监听端口 12345 失败,客户端已经打开...")
        exit(1)
    # Wrap incoming websocket clients and connect each to the web channel.
    clientWrapper = WebSocketClientWrapper(server)
    channel = QWebChannel()
    clientWrapper.clientConnected.connect(channel.connectTo)
    dialog = Dialog()
    channel.registerObject("dialog", dialog)
    #======================== initialize the OMServer connection =============================
    omClient=OMClient("omClient");
    omClient.start();
    # url=QUrl.fromLocalFile("./index.html");
    # url.setQuery("webChannelBaseUrl="+server.serverUrl().toString())
    # QDesktopServices.openUrl(url);