def read(self, msg: MSG):
    # logging.debug("FILE::{0}::READ msg:{1}".format(self.conn, msg))
    # -------------------------------------------------
    rm_idx = []
    for i in range(0, msg.args_count, 1):
        arguments = msg.read_args(i)
        if self.url_is_dir:
            # The connection points at a directory: the argument path is relative to it.
            path = os.path.join(self.conn.path, arguments["path"])
        else:
            # The connection points at a single file.
            path = self.conn.path
        suffix = os.path.splitext(path)[-1]
        # -------------------------------
        if os.path.isfile(path):
            if suffix in [".xls", ".xlsx"]:
                if self.url_is_dir:
                    datum = pd.read_excel(path)
                else:
                    # For a single workbook, the argument path names the sheet.
                    datum = pd.read_excel(path, sheet_name=arguments["path"])
            elif suffix in [".npy"]:
                datum = np.load(path)
            else:
                with open(path, "rb") as file:
                    datum = file.read()
            msg.add_datum(datum, arguments["path"])
            logging.info("FILE::{0}::READ successfully.".format(path))
        else:
            logging.warning("FILE::{0}::READ failed.".format(path))
            rm_idx = [i] + rm_idx
    # -------------------------------
    if CONFIG.IS_DATA_READ_START:
        # rm_idx is built back to front, so earlier indices stay valid while removing.
        for i in rm_idx:
            msg.remove_args(i)
    logging.info("FILE::{0}::READ successfully.".format(self.conn))
    return msg
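# A minimal standalone sketch of the suffix-based dispatch used by the FILE read
# above; `_read_local_sketch` and its `sheet` parameter are hypothetical names
# for illustration only, not part of the connector API.
import os

import numpy as np
import pandas as pd


def _read_local_sketch(path, sheet=None):
    suffix = os.path.splitext(path)[-1]
    if suffix in [".xls", ".xlsx"]:
        # A sheet name may be supplied when the path points at a single workbook.
        return pd.read_excel(path) if sheet is None else pd.read_excel(path, sheet_name=sheet)
    elif suffix == ".npy":
        return np.load(path)
    # Anything else is returned as raw bytes.
    with open(path, "rb") as file:
        return file.read()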
def read(self, msg: MSG):
    # logging.debug("FTP::{0}::READ msg:{1}".format(self.conn, msg))
    ftp = self.open()
    # -------------------------------------------------
    rm_idx = []
    for i in range(0, msg.args_count, 1):
        argument = msg.read_args(i)
        if self.url_is_dir:
            path = os.path.join(self.conn.path, argument["path"])
        else:
            path = self.conn.path
        suffix = os.path.splitext(path)[-1]
        # ----------------------------
        # retrbinary expects a callback, so collect the downloaded chunks in a buffer.
        chunks = bytearray()
        ftp.retrbinary('RETR ' + path, chunks.extend, CONFIG.FTP_BUFFER)
        stream = bytes(chunks)
        temp_path = "temp/temp" + suffix
        mkdir(os.path.split(temp_path)[0])
        # -------------------------------
        if suffix in [".xls", ".xlsx"]:
            with open(temp_path, "wb") as temp_file:
                temp_file.write(stream)
            if self.url_is_dir:
                stream = pd.read_excel(temp_path)
            else:
                # For a single workbook, the argument path names the sheet.
                stream = pd.read_excel(temp_path, sheet_name=argument["path"])
            logging.info("FTP::EXCEL::{0}::READ successfully.".format(path))
        elif suffix in [".npy"]:
            with open(temp_path, "wb") as temp_file:
                temp_file.write(stream)
            stream = np.load(temp_path)
            logging.info("FTP::NUMPY::{0}::READ successfully.".format(path))
        else:
            logging.info("FTP::{0}::READ successfully.".format(path))
        # Clean up the temporary file and directory if they were created.
        if os.path.isfile(temp_path):
            os.remove(temp_path)
            os.removedirs(os.path.split(temp_path)[0])
        del temp_path
        # -------------------------------
        msg.add_datum(datum=stream, path=argument["path"])
        rm_idx = [i] + rm_idx
    # -------------------------------------------------
    if CONFIG.IS_DATA_READ_START:
        for i in rm_idx:
            msg.remove_args(i)
    logging.info("FTP::{0}::READ successfully.".format(self.conn))
    return msg
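# A minimal standalone sketch of the ftplib download pattern used above:
# retrbinary() delivers the remote file in chunks to a callback, which are
# accumulated into memory here. Host, credentials, and the remote path are
# placeholders; `_ftp_download_sketch` is a hypothetical helper.
import ftplib


def _ftp_download_sketch(host, user, password, remote_path, blocksize=8192):
    chunks = bytearray()
    with ftplib.FTP(host) as ftp:
        ftp.login(user, password)
        # Each chunk of at most `blocksize` bytes is appended to the buffer.
        ftp.retrbinary("RETR " + remote_path, chunks.extend, blocksize)
    return bytes(chunks)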
def read(self, msg: MSG):
    # logging.debug("SQL::{0}::READ msg:{1}".format(self.conn, msg))
    # ----------------------------------
    rm_idx = []
    for i in range(0, msg.args_count, 1):
        argument = msg.read_args(i)
        df = pd.read_sql(sql=argument["stream"], con=self.engine)
        df = df.rename(str.upper, axis='columns')
        msg.add_datum(datum=df, path=argument["path"])
        rm_idx = [i] + rm_idx
    # ----------------------------------
    if CONFIG.IS_DATA_READ_START:
        for i in rm_idx:
            msg.remove_args(i)
    logging.info("SQL::{0}::READ successfully.".format(self.conn))
    return msg
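# A minimal standalone sketch of the SQL read above, assuming a SQLAlchemy
# engine; the connection URL, query, and `_sql_read_sketch` name are
# placeholders for illustration only.
import pandas as pd
from sqlalchemy import create_engine


def _sql_read_sketch(url="sqlite:///example.db", query="SELECT 1 AS value"):
    engine = create_engine(url)
    df = pd.read_sql(sql=query, con=engine)
    # Normalise column names to upper case, as the connector does.
    return df.rename(str.upper, axis="columns")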