def read(self, msg: MSG): # logging.debug("FILE::{0}::READ msg:{1}".format(self.conn, msg)) # ------------------------------------------------- rm_idx = [] for i in range(0, msg.args_count, 1): arguments = msg.read_args(i) if self.url_is_dir: path = os.path.join(self.conn.path, arguments["path"]) else: path = self.conn.path suffix = os.path.splitext(path)[-1] # ------------------------------- if os.path.isfile(path): if suffix in [".xls", ".xlsx"]: if self.url_is_dir: datum = pd.read_excel(path) else: datum = pd.read_excel(path, sheet_name=arguments["path"]) elif suffix in [".npy"]: datum = np.load(path) else: with open(path, "rb") as file: datum = file.read() msg.add_datum(datum, arguments["path"]) logging.info("FILE::{0}::READ successfully.".format(path)) else: logging.warning("FILE::{0}::READ failed.".format(path)) rm_idx = [i] + rm_idx # ------------------------------- if CONFIG.IS_DATA_READ_START: for i in rm_idx: msg.remove_args(i) logging.info("FILE::{0}::READ successfully.".format(self.conn)) return msg
def demo_excel():
    path = "C:/Users/geyua/PycharmProjects/Babelor/data/dir1/20190505.xlsx"
    df = pd.read_excel(path)
    msg = MSG()
    msg.add_datum(df, path)
    message = str(msg)
    new_msg = MSG(message)
    print(msg)
    print(new_msg)
def try_push(url: str):
    msg = MSG()
    msg.origination = URL("tcp://127.0.0.1:10001")
    msg.destination = URL("tcp://127.0.0.1:10001")
    mq = MQ(url)
    for i in range(0, 10000, 1):
        # print("push msg:", msg)
        msg.activity = str(i)
        # print("push msg:", i, msg)
        mq.push(msg)
def write(self, msg: MSG): # logging.debug("FILE::{0}::WRITE msg:{1}".format(self.conn, msg)) if self.url_is_dir: if not os.path.exists(self.conn.path): os.mkdir(self.conn.path) # ------------------------------- rm_idx = [] for i in range(0, msg.dt_count, 1): dt = msg.read_datum(i) if self.url_is_dir: path = os.path.join(self.conn.path, dt["path"]) else: path = self.conn.path suffix = os.path.splitext(path)[-1] # ------------------------------- if os.path.exists(path): logging.warning("FILE::{0}::WRITE failed.".format(path)) elif os.path.isfile(os.path.split(path)[0]): logging.warning("FILE::{0}::WRITE failed.".format(path)) else: if not os.path.isdir(os.path.split(path)[0]): mkdir(os.path.split(path)[0]) # ------------------------------- if suffix in [".xls", ".xlsx"]: if isinstance(dt["stream"], pd.DataFrame): dt["stream"].to_excel(path, index=False) logging.info( "FILE::EXCEL::{0}::WRITE successfully.".format( path)) else: logging.warning( "FILE::EXCEL::{0}::WRITE failed.".format(path)) elif suffix in [".npy"]: if isinstance(dt["stream"], np.ndarray): np.save(path, dt["stream"]) logging.info( "FILE::NUMPY::{0}::WRITE successfully.".format( path)) else: logging.warning( "FILE::NUMPY::{0}::WRITE failed.".format(path)) elif suffix in [""]: logging.warning("FILE::{0}::WRITE None.".format(path)) else: with open(path, "wb") as file: file.write(dt["stream"]) logging.info("FILE::{0}::WRITE successfully.".format(path)) rm_idx = [i] + rm_idx # ------------------------------- if CONFIG.IS_DATA_WRITE_END: for i in rm_idx: msg.remove_datum(i)
def read(self, msg: MSG): # logging.debug("FTP::{0}::READ msg:{1}".format(self.conn, msg)) ftp = self.open() # ------------------------------------------------- rm_idx = [] for i in range(0, msg.args_count, 1): argument = msg.read_args(i) if self.url_is_dir: path = os.path.join(self.conn.path, argument["path"]) else: path = self.conn.path suffix = os.path.splitext(path)[-1] # ---------------------------- stream = bytes() ftp.retrbinary('RETR ' + path, stream, CONFIG.FTP_BUFFER) temp_path = "temp/temp" + suffix mkdir(os.path.split(temp_path)[0]) # ------------------------------- if suffix in [".xls", ".xlsx"]: with open(temp_path, "wb") as temp_file: temp_file.write(stream) if self.url_is_dir: stream = pd.read_excel(temp_path) else: stream = pd.read_excel(temp_path, sheet_name=argument["path"]) logging.info( "FTP::EXCEL::{0}::READ successfully.".format(path)) elif suffix in [".npy"]: with open(temp_path, "wb") as temp_file: temp_file.write(stream) stream = np.load(temp_path) logging.info( "FTP::NUMPY::{0}::READ successfully.".format(path)) else: logging.info("FTP::{0}::READ successfully.".format(path)) os.remove(temp_path) os.removedirs(os.path.split(temp_path)[0]) del temp_path # ------------------------------- msg.add_datum(datum=stream, path=argument["path"]) rm_idx = [i] + rm_idx # ------------------------------------------------- if CONFIG.IS_DATA_READ_START: for i in rm_idx: msg.remove_args(i) logging.info("FTP::{0}::READ successfully.".format(self.conn)) return msg
def read(self, msg: MSG): # logging.debug("SQL::{0}::READ msg:{1}".format(self.conn, msg)) # ---------------------------------- rm_idx = [] for i in range(0, msg.args_count, 1): argument = msg.read_args(i) df = pd.read_sql(sql=argument["stream"], con=self.engine) df = df.rename(str.upper, axis='columns') msg.add_datum(datum=df, path=argument["path"]) rm_idx = [i] + rm_idx # ---------------------------------- if CONFIG.IS_DATA_READ_START: for i in rm_idx: msg.remove_args(i) logging.info("SQL::{0}::READ successfully.".format(self.conn)) return msg
def write(self, msg: MSG): # logging.debug("FTP::{0}::WRITE msg:{1}".format(self.conn, msg)) ftp = self.open() # ------------------------------------------------- rm_idx = [] for i in range(0, msg.dt_count, 1): dt = msg.read_datum(i) if self.url_is_dir: path = os.path.join(self.conn.path, dt["path"]) else: path = self.conn.path # ---------------------------- suffix = os.path.splitext(path)[-1] temp_path = "temp/temp" + suffix mkdir(os.path.split(temp_path)[0]) # ---------------------------- if suffix in [".xls", ".xlsx"]: if isinstance(dt["stream"], pd.DataFrame): dt["stream"].to_excel(temp_path, index=False) with open(temp_path, "rb") as temp_file: stream = temp_file.read() logging_info = "::EXCEL" else: stream = None logging_info = "::EXCEL" elif suffix in [".npy"]: if isinstance(dt["stream"], np.ndarray): np.save(temp_path, dt["stream"]) with open(temp_path, "rb") as temp_file: stream = temp_file.read() logging_info = "::NUMPY" else: stream = None logging_info = "::NUMPY" else: stream = dt["stream"] logging_info = "" # ---------------------------- ftp.storbinary('STOR ' + path, stream, CONFIG.FTP_BUFFER) rm_idx = [i] + rm_idx logging.info("FTP{0}::{1}::WRITE successfully.".format( logging_info, self.conn)) # ------------------------------------------------- if CONFIG.IS_DATA_WRITE_END: for i in rm_idx: msg.remove_datum(i) ftp.close()
def _create_mime(self, msg: MSG):
    # Connection
    sender_user = self.conn.fragment.username               # sender user name
    receiver_user = self.conn.username                      # receiver user name
    sender_name = self.conn.fragment.fragment.path          # sender display name
    receiver_name = self.conn.path                          # receiver display name
    sender_postfix = self.conn.fragment.fragment.hostname   # sender mail domain (postfix)
    receiver_postfix = self.conn.hostname                   # receiver mail domain (postfix)
    # from msg
    data = {}
    for i in range(0, msg.dt_count, 1):
        datum = msg.read_datum(i)
        if datum["path"] not in data.keys():
            data[datum["path"]] = datum["stream"]
    if "subject" in data.keys():
        subject = data["subject"]
    else:
        subject = CONFIG.MAIL_SUBJECT
    if "content" in data.keys():
        content = data["content"]
    else:
        content = CONFIG.MAIL_CONTENT
    attachments = []
    for k in data.keys():
        if k not in ["subject", "content"]:
            attachments.append({
                "stream": data[k],
                "path": k,
            })
    # Structure MIME
    self.me = str(Address(Header(sender_name, CONFIG.Coding).encode(), sender_user, sender_postfix))
    self.to = str(Address(Header(receiver_name, CONFIG.Coding).encode(), receiver_user, receiver_postfix))
    self.subject = subject
    self.content = content
    self.mime['from'] = self.me                                      # sender
    self.mime['to'] = self.to                                        # receiver
    self.mime['subject'] = Header(self.subject, 'UTF-8').encode()    # subject
    self.mime["Accept-Language"] = "zh-CN"                           # language
    self.mime["Accept-Charset"] = "ISO-8859-1,utf-8"                 # charset
    self.mime.attach(MIMEText(self.content, 'plain', "utf-8"))       # text body
    if len(attachments) > 0:                                         # attachments
        for attachment in attachments:
            part = MIMEBase('application', 'octet-stream')
            part.set_payload(attachment["stream"])
            encoders.encode_base64(part)
            part.add_header('Content-Disposition', 'attachment',
                            filename=Header(attachment["path"].split("/")[-1], 'UTF-8').encode())
            self.mime.attach(part)
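
# Standalone sketch of the MIME structure built in _create_mime(): a multipart
# message with a UTF-8 plain-text body and one base64-encoded attachment.
# All names, addresses, and the function name are illustrative, not Babelor defaults.
def demo_build_mime(sender: str, receiver: str, subject: str, body: str,
                    attachment: bytes, filename: str):
    from email import encoders
    from email.header import Header
    from email.mime.base import MIMEBase
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText
    mime = MIMEMultipart()
    mime["from"] = sender
    mime["to"] = receiver
    mime["subject"] = Header(subject, "UTF-8").encode()
    mime.attach(MIMEText(body, "plain", "utf-8"))            # text body
    part = MIMEBase("application", "octet-stream")           # attachment part
    part.set_payload(attachment)
    encoders.encode_base64(part)
    part.add_header("Content-Disposition", "attachment",
                    filename=Header(filename, "UTF-8").encode())
    mime.attach(part)
    return mime
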
def demo_numpy():
    arr = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    msg = MSG()
    msg.add_datum(arr)
    message = str(msg)
    new_msg = MSG(message)
    print(msg)
    print(new_msg)
    print(new_msg.read_datum(0)["stream"])
def func_encrypter(msg: MSG):
    # ------------------------------------- INIT ---------
    data = {}
    for i in range(0, msg.dt_count, 1):
        datum = msg.read_datum(i)
        if datum["path"] not in data.keys():
            data[datum["path"]] = datum["stream"]
    # ------------------------------------- PROCESS ------
    msg_out = msg
    # ------------------------------------- END ----------
    return msg_out
def func_sender(msg: MSG):
    # ------------------------------------- INIT ---------
    arguments = {}
    for i in range(0, msg.dt_count, 1):
        argument = msg.read_datum(i)
        if argument["path"] not in arguments.keys():
            arguments[argument["path"]] = argument["stream"]
    # ------------------------------------- PROCESS ------
    msg_out = msg
    # ------------------------------------- END ----------
    return msg_out
def write(self, msg: MSG): # logging.debug("SQL::{0} write:{1}".format(self.conn, msg)) # ---------------------------------- rm_idx = [] for i in range(0, msg.dt_count, 1): rt = msg.read_datum(i) df = rt["stream"] path = os.path.splitext(rt["path"])[0] if isinstance(df, pd.DataFrame): df.to_sql(path, con=self.engine, if_exists='replace', index=False, index_label=False) logging.info("SQL::{0}::WRITE successfully.".format(self.conn)) else: logging.warning("SQL::{0}::WRITE failed.".format(self.conn)) rm_idx = [i] + rm_idx # ---------------------------------- if CONFIG.IS_DATA_WRITE_END: for i in rm_idx: msg.remove_datum(i)
def try_request():
    msg = MSG()
    msg.origination = URL("tcp://127.0.0.1:10001")
    mq = MQ(URL("tcp://127.0.0.1:10001"))
    msg = mq.request(msg)
def first_out_last_in(conn: str, me: str, queue_ctrl: Queue, pipe_in: Pipe, pipe_out: Pipe):
    """
    # Send first, receive later / send only
    # control signal --> output pipe --> feedback --> input pipe
    :param conn: str          # socket address "tcp://<hostname>:<port>"
    :param me: str            # transport mode ["REQUEST", "SUBSCRIBE", "PUSH"]
    :param queue_ctrl: Queue  # control queue ("is_active",):(bool,)
    :param pipe_in: Pipe      # input pipe ("msg_in",):(MSG,)
    :param pipe_out: Pipe     # output pipe ("msg_out",):(MSG,)
    :return: None
    """
    context = zmq.Context()
    # ------- REQUEST ----------------------------
    if me in ["REQUEST"]:
        socket = context.socket(zmq.REQ)
        socket.connect(conn)
        handshake, has_response = 0, True
    # ------- SUBSCRIBE --------------------------
    elif me in ["SUBSCRIBE"]:
        socket = context.socket(zmq.SUB)
        socket.connect(conn)
        handshake, has_response = 1, True
    # ------- PUSH ------------------------------
    elif me in ["PUSH"]:
        socket = context.socket(zmq.PUSH)
        socket.connect(conn)
        handshake, has_response = 0, False
    # ------- DEFAULT: PUSH ---------------------
    else:
        socket = context.socket(zmq.PUSH)
        socket.connect(conn)
        handshake, has_response = 0, False
    logging.debug("ZMQ::FOLI::{0} connect:{1}".format(me, conn))
    # ------------------------------------- QUEUE
    is_active = queue_ctrl.get()
    while is_active:
        if queue_ctrl.empty():
            # SEND --------------------------------
            if handshake == 1:
                socket.setsockopt(zmq.SUBSCRIBE, b'')   # subscribe to all topics; option value must be bytes
                logging.debug("ZMQ::FOLI::{0}::{1} send:{2}".format(me, conn, "zmq.SUBSCRIBE"))
            else:
                try:
                    msg_out = pipe_out.recv()
                    logging.debug("ZMQ::FOLI::{0}::{1}::PIPE OUT recv:{2}".format(me, conn, msg_out))
                    message_out = str(msg_out).encode(CONFIG.Coding)
                    socket.send(message_out)
                    logging.debug("ZMQ::FOLI::{0}::{1} send:{2}".format(me, conn, message_out))
                except EOFError:
                    is_active = False
            # RECV --------------------------------
            if has_response:
                message_in = socket.recv()
                logging.debug("ZMQ::FOLI::{0}::{1} recv:{2}".format(me, conn, message_in))
                msg_in = MSG(message_in.decode(CONFIG.Coding))
                pipe_in.send(msg_in)
                logging.debug("ZMQ::FOLI::{0}::{1}::PIPE IN send:{2}".format(me, conn, msg_in))
        else:
            is_active = queue_ctrl.get()
    else:
        queue_ctrl.close()
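
# Illustrative wiring for the worker above: a control Queue plus two Pipes,
# run in a child process. The local names ("ctrl", "parent_in", ...) and the
# choice of PUSH mode are assumptions for this sketch, not the Babelor API.
def demo_spawn_foli(conn: str = "tcp://127.0.0.1:10001"):
    from multiprocessing import Process, Queue, Pipe
    ctrl = Queue()
    parent_in, child_in = Pipe()      # worker -> caller: received MSGs
    parent_out, child_out = Pipe()    # caller -> worker: MSGs to send
    worker = Process(target=first_out_last_in,
                     args=(conn, "PUSH", ctrl, child_in, child_out))
    ctrl.put(True)                    # first control signal lets the loop start
    worker.start()
    return ctrl, parent_in, parent_out, worker
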
def first_in_last_out(conn: str, me: str, queue_ctrl: Queue, pipe_in: Pipe, pipe_out: Pipe):
    """
    # Receive first, send later / receive only
    # control signal (start) --> input pipe (push in) --> control signal (needs reply) --> output pipe (push out)
    :param conn: str          # socket address
    :param me: str            # transport mode ("REPLY", "PUBLISH", "PULL")
    :param queue_ctrl: Queue  # control queue ("is_active", "is_response"):(bool, bool)
    :param pipe_in: Pipe      # input pipe (MSG,)
    :param pipe_out: Pipe     # output pipe (MSG,)
    :return: None
    """
    context = zmq.Context()
    # ------- REPLY ----------------------------
    if me in ["REPLY"]:
        socket = context.socket(zmq.REP)
        socket.bind(conn)
        has_response = True
    # ------- PUBLISH --------------------------
    elif me in ["PUBLISH"]:
        socket = context.socket(zmq.PUB)
        socket.bind(conn)
        has_response = False
    # ------- PULL -----------------------------
    elif me in ["PULL"]:
        socket = context.socket(zmq.PULL)
        socket.bind(conn)
        has_response = False
    # ------- DEFAULT: PULL --------------------
    else:
        socket = context.socket(zmq.PULL)
        socket.bind(conn)
        has_response = False
    logging.debug("ZMQ::FILO::{0} bind:{1}".format(me, conn))
    # ------------------------------------- QUEUE
    is_active = queue_ctrl.get()
    while is_active:
        if queue_ctrl.empty():
            # RECV --------------------------------
            message_in = socket.recv()
            logging.debug("ZMQ::FILO::{0}::{1} recv:{2}".format(me, conn, message_in))
            msg_in = MSG(message_in.decode(CONFIG.Coding))
            pipe_in.send(msg_in)
            logging.debug("ZMQ::FILO::{0}::{1}::PIPE IN send:{2}".format(me, conn, msg_in))
            # SEND --------------------------------
            if has_response:
                try:
                    msg_out = pipe_out.recv()
                    logging.debug("ZMQ::FILO::{0}::{1}::PIPE OUT recv:{2}".format(me, conn, msg_out))
                    message_out = str(msg_out).encode(CONFIG.Coding)
                    socket.send(message_out)
                    logging.debug("ZMQ::FILO::{0}::{1} send:{2}".format(me, conn, message_out))
                except EOFError:
                    is_active = False
        else:
            is_active = queue_ctrl.get()
    else:
        queue_ctrl.close()
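
# Bare-bones zmq REQ round trip against the REPLY branch above; useful for
# checking an endpoint outside the worker processes. The endpoint and the
# b"ping" payload are placeholders.
def demo_zmq_req(endpoint: str = "tcp://127.0.0.1:10001") -> bytes:
    context = zmq.Context()
    socket = context.socket(zmq.REQ)
    socket.connect(endpoint)
    socket.send(b"ping")              # REQ must send before it can recv
    reply = socket.recv()
    socket.close()
    context.term()
    return reply
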
def encrypter(url):
    myself = TEMPLE(url)
    myself.open(role="encrypter", func=func_encrypter)


def receiver(url):
    myself = TEMPLE(url)
    myself.open(role="receiver")


def receiver_init():
    # ------------------------------------- MESSAGE -----
    case = CASE("{0}#{1}".format(origination_url, destination_url))
    receiver_msg = MSG()
    receiver_msg.case = case
    # ------------------------------------- RECEIVER ----
    receiver_msg.origination = edge_node_url["inner"]
    receiver_msg.destination = destination_url
    # logging.warning("RECEIVER::INIT::{0} send:{1}".format(receiver_url["inner"], receiver_msg))
    recv_init = MQ(receiver_url["outer"])
    recv_init.push(receiver_msg)


def sender_init():
    # ------------------------------------- MESSAGE -----
    case = CASE("{0}#{1}".format(origination_url, destination_url))
    sender_msg = MSG()
    sender_msg.case = case
    # ------------------------------------- SENDER ------
def try_reply_func(msg: MSG):
    msg.destination = URL().init("oracle")
    return msg
def try_publish():
    msg = MSG()
    msg.origination = URL("tcp://127.0.0.1:10001")
    mq = MQ(URL("tcp://*:10001"))
    print("publish msg:", msg)
    mq.publish(msg)
def demo_msg_mysql2ftp():
    msg = MSG()
    msg.origination = URL().init("mysql").check
    msg.destination = URL().init("ftp").check
    msg.treatment = URL().init("tcp")
    case = CASE()
    case.origination = msg.origination
    case.destination = msg.destination
    msg.activity = "init"
    msg.case = case
    msg.add_datum("This is a test string", path=URL().init("file"))
    msg.add_datum("中文UTF-8编码测试", path=URL().init("file"))   # Chinese payload for the UTF-8 round-trip test
    path = r"..\README\Babelor-设计.png"
    with open(path, "rb") as f:
        bytes_f = f.read()
    url = URL().init("file")
    url.path = path
    msg.add_datum(bytes_f, url)
    msg_string = str(msg)
    new_msg = MSG(msg_string)
    new_bytes_f = new_msg.read_datum(3)["stream"]
    new_path = r"..\README\Babelor-设计-new.png"
    with open(new_path, "wb") as f:
        f.write(new_bytes_f)