def mlst(connection: ftplib.FTP, filename: str, facts: Optional[List[str]] = None):
    """
    Execute the MLST command on the server and parse its reply.

    :param connection: open connection to the server
    :param filename: filename to query
    :param facts: fact names to request (see ftplib.mlsd doc); when given,
        an ``OPTS MLST`` command is sent first to select them
    :return: tuple of (path reported by the server, dict of lower-cased
        fact name -> value)
    :raises ftplib.Error: if the MLST response does not have the expected
        three-line / fact-line structure
    """
    # Restrict the fact set first if the caller asked for specific facts.
    if facts:
        connection.sendcmd("OPTS MLST " + ";".join(facts) + ";")

    response = connection.sendcmd("MLST {}".format(filename))

    # Expected shape: "250-..." header, one indented fact line, "250 End".
    response_lines = response.split('\n')
    if len(response_lines) <= 1:
        raise ftplib.Error(
            "Unexpected number of lines in an MLST response: {!r}".format(
                response))

    fact_line = response_lines[1].lstrip(' ')
    pieces = fact_line.split(' ')
    if len(pieces) != 2:
        raise ftplib.Error(
            "Unexpected partition of MLST fact line by space: {!r}".format(
                response))

    raw_facts, pth = pieces
    entry = {}
    for fact in raw_facts.rstrip(';').split(';'):
        key_value = fact.split('=')
        if len(key_value) != 2:
            raise ftplib.Error(
                "Unexpected partition of MLST fact line by equal sign: {!r}".
                format(response))
        entry[key_value[0].lower()] = key_value[1]
    return pth, entry
class MyFTP(ftplib.FTP):
    """Custom FTP client that connects and logs in at construction time.

    (Docstring translated from Chinese: implements FTP upload/download
    with resume support.)
    """

    def __init__(self, host="", port=21, user="", passwd="", timeout=60,
                 force=False):
        """Store connection settings, then connect and log in immediately.

        :param host: server hostname or IP
        :param port: server port
        :param user: login user name
        :param passwd: login password
        :param timeout: socket timeout in seconds
        :param force: stored for later use by callers (e.g. to decide
            whether missing remote directories may be created)
        :raises ftplib.Error: if connecting or logging in fails
        """
        # Initialize base-class state first: on Python 3.9+ FTP.encoding is
        # only set inside FTP.__init__, and connect()/getresp() need it.
        super().__init__()
        self.host = host
        self.port = port
        self.user = user
        self.passwd = passwd
        self.timeout = timeout
        self.force = force
        self._conn_login()

    def _conn_login(self):
        """Connect to the server and log in; wrap failures in ftplib.Error."""
        try:
            self.connect(self.host, self.port, self.timeout)
        except Exception as e:  # fixed Py2 `except Exception, e` syntax
            sys.stderr.write("connect failed - {0}".format(e))
            raise ftplib.Error("connect failed - {0}".format(e)) from e
        try:
            self.login(self.user, self.passwd)
        except Exception as e:
            sys.stderr.write("login failed - {0}".format(e))
            raise ftplib.Error("login failed - {0}".format(e)) from e
def ftp_upload(self, file: str, remote_dir: str, backup_stamp: int,
               session, local_logger) -> bool:
    """Upload *file* into ``<remote_dir>/<backup_stamp>/`` on the server.

    Creates both directory levels first, ignoring "already exists"
    permission errors.

    :param file: local path of the file to upload
    :param remote_dir: remote base directory
    :param backup_stamp: per-backup subdirectory name component
    :param session: connected ftplib.FTP (or compatible) session
    :param local_logger: logger with ``info``/``exception`` methods
    :return: True on success, False if the transfer raised ftplib.Error
    :raises ftplib.Error: if directory creation fails for a reason other
        than a permission error
    """
    f_name = os.path.basename(file)
    dir_stamp = f"{remote_dir}/{backup_stamp}"
    for directory in (remote_dir, dir_stamp):
        try:
            session.mkd(directory)
        except ftplib.error_perm:
            # Directory most likely exists already; best-effort creation.
            pass
        except ftplib.Error as e:
            local_logger.exception(e)
            # Bare raise preserves the original exception type/traceback
            # (the old `raise ftplib.Error(e)` re-wrapped and lost both).
            raise
    try:
        # `with` guarantees the handle is closed (replaces try/finally).
        with open(file, "rb") as file_fh:
            session.storbinary(f"STOR {dir_stamp}/{f_name}", file_fh,
                               blocksize=10000000)
        local_logger.info(f"file {dir_stamp}/{f_name} uploaded")
        return True
    except ftplib.Error:
        return False
def _conn_login(self):
    """Connect to ``self.host:self.port``; wrap failures in ftplib.Error.

    Writes the failure to stderr before raising so the error is visible
    even if the caller swallows the exception.

    :raises ftplib.Error: if the connection attempt fails
    """
    try:
        self.connect(self.host, self.port, self.timeout)
    except Exception as e:  # fixed Py2 `except Exception, e` syntax
        sys.stderr.write("connect failed - {0}".format(e))
        raise ftplib.Error("connect failed - {0}".format(e)) from e
def retrlines(self, cmd, callback = None):
    """Retrieve data in line mode.  A new port is created for you.

    Args:
      cmd: A RETR, LIST, or NLST command.
      callback: An optional single parameter callable that is called
                for each line with the trailing CRLF stripped.
                [default: print_line()]

    Returns:
      The response code.

    Tolerant of malformed encodings: each received line is decoded
    with the encoding detected by chardet when the detection
    confidence is > 70%; otherwise the connection's default encoding
    (``self.encoding``) is used.
    """
    if callback is None:
        callback = ftplib.print_line
    # ASCII transfer mode, as in the stock ftplib.FTP.retrlines.
    self.sendcmd('TYPE A')
    # Read raw bytes ('rb') so we can decode each line ourselves below.
    with self.transfercmd(cmd) as conn, conn.makefile('rb', encoding=None) as fp:
        while 1:
            line = fp.readline(self.maxline + 1)
            guess = chardet.detect(line)
            # print("Line type = ", type(line))
            # print("conn type = ", type(conn))
            # print("Guessed encoding - ", chardet.detect(line))
            # print(line)
            # NOTE(review): assumes guess['encoding'] is a usable codec name
            # whenever confidence > 0.7 — confirm against chardet behavior.
            if guess['confidence'] > 0.7:
                line = line.decode(guess['encoding'])
            else:
                line = line.decode(self.encoding)
            if len(line) > self.maxline:
                raise ftplib.Error("got more than %d bytes" % self.maxline)
            if self.debugging > 2:
                print('*retr*', repr(line))
            if not line:
                break
            # Strip the trailing line terminator before handing to callback.
            if line[-2:] == ftplib.CRLF:
                line = line[:-2]
            elif line[-1:] == '\n':
                line = line[:-1]
            callback(line)
        # shutdown ssl layer
        if ftplib._SSLSocket is not None and isinstance(conn, ftplib._SSLSocket):
            conn.unwrap()
    return self.voidresp()
def ftp_conn(self, local_logger):
    """Open an FTP session using ``self.ftp_host/ftp_user/ftp_pass``.

    :param local_logger: logger with ``info``/``exception`` methods
    :return: connected ftplib.FTP session
    :raises ftplib.Error: on FTP protocol errors
    :raises socket.timeout: if the connection times out
    :raises OSError: on other socket-level failures
    """
    local_logger.info("try ftp_conn")
    try:
        session = ftplib.FTP(self.ftp_host, self.ftp_user, self.ftp_pass,
                             timeout=180)
        local_logger.info("ftp conn OK")
        return session
    # Bare `raise` preserves the original exception instance and traceback;
    # the old code re-raised empty instances (e.g. `raise OSError`), losing
    # the error message. socket.timeout must precede OSError (its parent).
    except ftplib.Error as e:
        local_logger.exception(e)
        raise
    except socket.timeout as to:
        local_logger.exception(to)
        raise
    except OSError as oe:
        local_logger.exception(oe)
        raise
def upload(self, local_path, remote_path):
    """Upload ``local_path`` to ``remote_path``, resuming if partial.

    (Comments translated from Chinese.) Walks/creates the remote
    directory hierarchy, compares local and remote sizes, and resumes
    the transfer from the remote size via ``REST`` when the remote file
    is shorter than the local one.

    :param local_path: local file to upload
    :param remote_path: remote path; split via ``self.splitpath`` into
        (directory part, file name)
    :raises ftplib.Error: if the local file does not exist, or on FTP
        errors while changing/creating directories
    """
    # Check the local file exists before touching the server.
    if not os.path.exists(local_path):
        raise ftplib.Error("local file doesn't exists")
    dires = self.splitpath(remote_path)
    remotefile = dires[1]
    # Walk the multi-level remote directory, creating levels when forced.
    for dir in dires[0].split('/'):
        if self.force:
            try:
                self.cwd(dir)
            except ftplib.Error:  # was bare `except:` — narrowed
                self.mkd(dir)
                self.cwd(dir)
        else:
            self.cwd(dir)
    # Remote size (0 when the file is absent) decides the resume offset.
    rsize = 0  # was Py2 `0L` long literal — a SyntaxError on Python 3
    try:
        rsize = self.size(remotefile)
    except ftplib.Error:
        pass
    if rsize is None:
        rsize = 0
    lsize = os.stat(local_path).st_size
    if lsize == rsize:
        return  # already fully uploaded
    if rsize < lsize:
        # Resume: seek past the bytes the server already has.
        with open(local_path, 'rb') as localf:
            localf.seek(rsize)
            self.storbinary("STOR " + remotefile, localf,
                            blocksize=1024 * 1024, rest=rsize)
def getline(self):
    """Read one response line from the control connection.

    Mirrors ``ftplib.FTP.getline`` but additionally appends the
    sanitized line to ``self._log`` (when ``self.use_log`` is set) and
    always stores it in ``self._last_response``, both under
    ``self.lock``.

    :return: the line with its trailing CR/LF terminator stripped
    :raises ftplib.Error: if the line exceeds ``self.maxline`` bytes
    :raises EOFError: if the connection returned no data
    """
    line = self.file.readline(self.maxline + 1)
    if len(line) > self.maxline:
        raise ftplib.Error("got more than %d bytes" % self.maxline)
    if self.debugging > 1:
        # NOTE(review): the debug path calls self.sanitize while the
        # logging path below calls self._sanitize — confirm both exist
        # and the difference is intentional.
        print('*get*', self.sanitize(line))
    if self.use_log:
        with self.lock:
            self._log = (self._log + ('\n*get* {0}'.format(self._sanitize(line))))
    with self.lock:
        self._last_response = self._sanitize(line)
    if not line:
        raise EOFError
    # Strip the terminator: full CRLF, otherwise a lone CR or LF.
    if line[-2:] == ftplib.CRLF:
        line = line[:-2]
    elif line[-1:] in ftplib.CRLF:
        line = line[:-1]
    return line
def ftp_dir_remove(self, session, path_q: str, local_logger):
    """Recursively delete the remote directory ``path_q``.

    Deletes every file listed by MLSD (logging, but not propagating,
    per-file delete errors), recurses into subdirectories, then removes
    ``path_q`` itself.

    :param session: connected ftplib.FTP (or compatible) session
    :param path_q: remote directory to remove
    :param local_logger: logger with an ``info`` method
    :raises ftplib.Error: if removing ``path_q`` itself fails (the old
        `raise ftplib.Error(e)` re-wrapped subclasses such as
        error_perm and lost the traceback; now the original exception
        propagates unchanged)
    """
    for name, facts in session.mlsd(path=path_q):
        if name in ('.', '..'):
            continue
        if facts['type'] == 'file':
            try:
                local_logger.info(f"trying to delete {path_q}/{name}")
                session.delete(f"{path_q}/{name}")
            except ftplib.Error as e:
                # Best-effort per-file delete: log and continue.
                local_logger.info(f"ERROR {e}")
            except socket.timeout as to:
                local_logger.info(f"ERROR {to}")
        elif facts['type'] == 'dir':
            self.ftp_dir_remove(session, f"{path_q}/{name}", local_logger)
    session.rmd(path_q)
def get_bsrn(station, start, end, username, password,
             logical_records=('0100',), save_path=None):
    """
    Retrieve ground measured irradiance data from the BSRN FTP server.

    The BSRN (Baseline Surface Radiation Network) is a world wide network
    of high-quality solar radiation monitoring stations as described in
    [1]_. Data is retrieved from the BSRN FTP server [2]_.

    Data is returned for the entire months between and including start and
    end.

    Parameters
    ----------
    station: str
        3-letter BSRN station abbreviation
    start: datetime-like
        First day of the requested period
    end: datetime-like
        Last day of the requested period
    username: str
        username for accessing the BSRN FTP server
    password: str
        password for accessing the BSRN FTP server
    logical_records: list or tuple, default: ('0100',)
        List of the logical records (LR) to parse. Options include: '0100',
        '0300', and '0500'.
    save_path: str or path-like, optional
        If specified, a directory path of where to save each monthly file.

    Returns
    -------
    data: DataFrame
        timeseries data from the BSRN archive, see
        :func:`pvlib.iotools.read_bsrn` for fields. An empty DataFrame is
        returned if no data was found for the time period.
    metadata: dict
        metadata for the last available monthly file.

    Raises
    ------
    KeyError
        If the specified station does not exist on the FTP server.

    Warns
    -----
    UserWarning
        If one or more requested files are missing a UserWarning is returned
        with a list of the filenames missing. If no files match the specified
        station and timeframe a separate UserWarning is given.

    Notes
    -----
    The username and password for the BSRN FTP server can be obtained for
    free as described in the BSRN's Data Release Guidelines [3]_.

    Currently only parsing of logical records 0100, 0300 and 0500 is
    supported. Note not all stations measure LR0300 and LR0500. However,
    LR0100 is mandatory as it contains the basic irradiance and auxiliary
    measurements. See [4]_ for a description of the different logical
    records. Future updates may include parsing of additional data and
    metadata.

    Important
    ---------
    While data from the BSRN is generally of high-quality, measurement data
    should always be quality controlled before usage!

    Examples
    --------
    >>> # Retrieve two months irradiance data from the Cabauw BSRN station
    >>> data, metadata = pvlib.iotools.get_bsrn(  # doctest: +SKIP
    >>>     start=pd.Timestamp(2020,1,1), end=pd.Timestamp(2020,12,1),  # doctest: +SKIP
    >>>     station='cab', username='******', password='******')  # doctest: +SKIP

    See Also
    --------
    pvlib.iotools.read_bsrn, pvlib.iotools.parse_bsrn

    References
    ----------
    .. [1] `World Radiation Monitoring Center - Baseline Surface Radiation
        Network (BSRN) <https://bsrn.awi.de/>`_
    .. [2] `BSRN Data Retrieval via FTP
        <https://bsrn.awi.de/data/data-retrieval-via-ftp/>`_
    .. [3] `BSRN Data Release Guidelines
        <https://bsrn.awi.de/data/conditions-of-data-release/>`_
    .. [4] `Update of the Technical Plan for BSRN Data Management, 2013,
        Global Climate Observing System (GCOS) GCOS-174.
        <https://bsrn.awi.de/fileadmin/user_upload/bsrn.awi.de/Publications/gcos-174.pdf>`_
    """  # noqa: E501
    # The FTP server uses lowercase station abbreviations
    station = station.lower()

    # Generate list of files to download based on start/end (SSSMMYY.dat.gz)
    filenames = pd.date_range(
        start, end.replace(day=1) + pd.DateOffset(months=1), freq='1M')\
        .strftime(f"{station}%m%y.dat.gz").tolist()

    # Create FTP connection
    with ftplib.FTP(BSRN_FTP_URL, username, password) as ftp:
        # Change to station sub-directory (checks that the station exists)
        try:
            ftp.cwd(f'/{station}')
        except ftplib.error_perm as e:
            raise KeyError('Station sub-directory does not exist. Specified '
                           'station is probably not a proper three letter '
                           'station abbreviation.') from e
        dfs = []  # Initialize list for monthly dataframes
        non_existing_files = []  # Initialize list of files that were not found
        for filename in filenames:
            try:
                bio = io.BytesIO()  # Initialize BytesIO object
                # Retrieve binary file from server and write to BytesIO object
                # BUGFIX: the RETR command must name the requested file; it
                # previously sent the literal string 'RETR (unknown)'.
                response = ftp.retrbinary(f'RETR {filename}', bio.write)
                # Check that transfer was successful
                if not response.startswith('226 Transfer complete'):
                    raise ftplib.Error(response)
                # Save file locally if save_path is specified
                if save_path is not None:
                    # `with` guarantees the local file handle is closed
                    with open(os.path.join(save_path, filename), 'wb') as f:
                        f.write(bio.getbuffer())  # Write local file
                # Open gzip file and convert to StringIO
                bio.seek(0)  # reset buffer to start of file
                gzip_file = io.TextIOWrapper(gzip.GzipFile(fileobj=bio),
                                             encoding='latin1')
                dfi, metadata = parse_bsrn(gzip_file, logical_records)
                dfs.append(dfi)
            # FTP client raises an error if the file does not exist on server
            except ftplib.error_perm as e:
                if str(e) == '550 Failed to open file.':
                    non_existing_files.append(filename)
                else:
                    # Bare raise keeps the original exception and traceback
                    raise
        ftp.quit()  # Close and exit FTP connection
    # Raise user warnings
    if not dfs:  # If no files were found
        warnings.warn('No files were available for the specified timeframe.')
    elif non_existing_files:  # If only some files were missing
        warnings.warn(
            f'The following files were not found: {non_existing_files}')
    # Concatenate monthly dataframes to one dataframe
    if len(dfs):
        data = pd.concat(dfs, axis='rows')
    else:  # Return empty dataframe
        data = _empty_dataframe_from_logical_records(logical_records)
        metadata = {}
    # Return dataframe and metadata (metadata belongs to last available file)
    return data, metadata
def Backup(date, ip, sysname, soft_type, logback_tag, logFile, backPath, isDel, isGzip, ftpipstr, ftpuser, ftppasswd, ID): starttime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())) # 取出备份日志的路径和日志名称 logDir = os.path.dirname(logFile) logName = os.path.basename(logFile) # 判断日志路径和日志是否存在 dirExsitCmd = """ test -d %s; echo $? """ % logDir fileExsitCmd = """ find %s -maxdepth 1 -name "%s" | wc -l""" % (logDir, logName) fileExsitRet = os.popen(dirExsitCmd).readlines()[0].strip('\r\n') if fileExsitRet != "0": des = 'logDir[' + logDir + '] not exsit!' msg = des logger.warning(msg) status = '1002' endtime = data() count = 0 gznum = 0 getResponse(starttime, endtime, gznum, count, status, des, ID) sys.exit() fileExsitRet = os.popen(fileExsitCmd).readlines()[0].strip('\r\n') if fileExsitRet == "0": des = 'No logFile[' + logName + '] found in [' + logDir + ']!' msg = des logger.warning(msg) print des status = '2001' endtime = data() count = 0 gznum = 0 getResponse(starttime, endtime, gznum, count, status, des, ID) sys.exit() # 是否压缩 if isGzip == 'Y': gzipFinishedCmd = """ find %s -maxdepth 1 -name "%s" ! -name "*.gz" | wc -l """ % ( logDir, logName) gznum = os.popen(gzipFinishedCmd).readlines()[0].strip('\r\n') compressFilesCmd = """ ls %s | grep -v gz$ | xargs -I {} gzip {} """ % logFile os.popen(compressFilesCmd) else: gznum = 0 ftpInstalledCmd = "command -v ftp >/dev/null; echo $?" if os.popen(ftpInstalledCmd).readlines()[0].strip('\r\n') != "0": des = "ftp Command not found!" 
msg = des logger.error(msg) print des endtime = data() status = '1003' count = 0 getResponse(starttime, endtime, gznum, count, status, des, ID) sys.exit() # 判断目录是否存在,存在就进去,不存在就创建 try: FtpClient = FtpTools(ftpipstr, ftpuser, ftppasswd) if STREXP: raise ftplib.Error(STREXP) else: for path in backPath.split('/'): try: FtpClient.ftp.cwd(path) msg = 'Get into' + path + ' successfully' logger.info(msg) except Exception, e: try: msg = 'Get into' + path + ' failed' logger.error(msg) time.sleep(random.uniform(3, 5)) FtpClient.ftp.mkd(path) msg = 'The dir' + path + ' was created successfully!' logger.info(msg) FtpClient.ftp.cwd(path) msg = 'Second entry' + path + ' successfully' logger.info(msg) except Exception, e: try: msg = 'create dir ' + path + ' failed!' logger.error(msg) time.sleep(random.uniform(3, 5)) FtpClient.ftp.cwd(path) msg = 'Try to entry ' + path + ' successfully!' logger.info(msg) except Exception, e: raise ftplib.Error(FTP_IP + " mkdir " + path + " failed:" + str(e))