def upload(self, path, type, name=None):
    """Upload a local file to the FTPS server.

    :param path: Local path of the file to upload.
    :param type: Remote category folder ('files', or e.g. screenshots/keylogs).
    :param name: Optional remote file name; defaults to the local basename.
    :returns: Human-readable status string.
    """
    ftps = None
    try:
        fname = name if name else path.split('\\')[-1]
        ftps = FTP_TLS(self.host)
        ftps.login(self.user, self.password)
        ftps.prot_p()
        # force encoding to utf-8, this will let us to work with unicode file names
        ftps.encoding = "utf-8"
        ftps.cwd(self.user_dir)
        ftps.cwd(type)
        if type != 'files':
            # if type of requested file is screenshot or keylog
            # upload it to special folder on ftp, one folder per day
            today = date.today().strftime("%b-%d-%Y")
            if today not in ftps.nlst():
                ftps.mkd(today)
            ftps.cwd(today)
            # close the local handle deterministically (original leaked it)
            with open(path, 'rb') as fh:
                ftps.storbinary('STOR %s' % fname, fh)
            return 'Upload Started!'
        if fname in ftps.nlst():
            return 'File Already on FTP'
        with open(path, 'rb') as fh:
            ftps.storbinary('STOR %s' % fname, fh)
        return 'Upload Started!'
    except Exception as e:
        # guard against exceptions with empty args before indexing
        if e.args and e.args[0] == 0:
            return 'Uploaded to FTP!'
        return 'Upload Failed!'
    finally:
        # always release the control connection; the original leaked it on
        # the early-return paths
        if ftps is not None:
            try:
                ftps.close()
            except Exception:
                pass
def getfilelist(server, port, user, password, db):
    """Index the first five root folders of the FTPS server into the `files` table.

    Each row is (epoch timestamp, "/root/episode"); names are unique so
    re-runs only add new entries.
    """
    sqliteconnection = sqlite3.connect(db)
    sqlitecursor = sqliteconnection.cursor()
    sqlitecursor.execute('''CREATE TABLE IF NOT EXISTS files
                            (date int, name text,
                             CONSTRAINT 'id_UNIQUE' UNIQUE ('name'))''')
    sqliteconnection.commit()
    ftpsconnection = FTP_TLS()
    ftpsconnection.connect(server, port)
    ftpsconnection.auth()
    ftpsconnection.prot_p()  # secure the data channel once (was called twice)
    ftpsconnection.login(user, password)
    rootfiles = ftpsconnection.nlst()
    # Only the first five root entries are indexed; slicing also avoids the
    # IndexError the original hit when fewer than five entries exist.
    for rootdir in rootfiles[:5]:
        episodes = ftpsconnection.nlst(rootdir)
        for episode in episodes:
            # Parameterized insert: stores the date as a real integer and
            # avoids the quoting problems of %-interpolated SQL.
            sqlitecursor.execute(
                'INSERT OR IGNORE INTO files VALUES (?, ?)',
                (int(time.time()), '/' + rootdir + '/' + episode))
    sqliteconnection.commit()
    sqliteconnection.close()
    ftpsconnection.quit()
    ftpsconnection.close()
def sendPackagesFtp(): print "---- Send packages by FTP" global serverFtp global userFtp global passFtp from ftplib import FTP_TLS ftps = FTP_TLS(serverFtp) ftps.set_debuglevel(1) ftps.login(userFtp, passFtp) ftps.prot_p() try: ftps.sendcmd('MKD ' + '/files/' + strProductVer) except Exception: print 'Directory already exists' ftps.cwd('/files/' + strProductVer) filesListFtp = ftps.nlst() filesList = os.listdir(packagesPath) newFilesList = [e for e in filesList if not(e in filesListFtp)] for fileName in newFilesList: ftps.storbinary('STOR ' + fileName, open(packagesPath + '\\' + fileName, 'rb')) ftps.quit()
def main(): global ftp_client scan_arguments() ftp_client = FTP(host) try: ftp_client.login(username, password) except ftplib.all_errors as e: print "ERROR: cannot login with username '{0}' and relative password.\nMessage returned from server:".format(username) print e return try: ftp_client.cwd(remote_dir) except ftplib.all_errors as e: print "ERROR: emote directory '{0}' not existing.\nMessage returned from server:".format(remote_dir) print e return else: files = ftp_client.nlst() print_directory_content(files) setup_folder() download_files(remote_dir, files) if compress: create_zip(local_dir) try: ftp_client.close() print "!!!!! OPERATION COMPLETED SUCCESSFULLY !!!!!" except ftplib.all_errors as e: print "ERROR: cannot close the connection properly.\nMessage from server:" print e
def get_ftp_data(self, cr, uid, ids, context=None):
    """Fetch every CSV on each chain's configured FTP server, import it
    through `import_to_db`, then delete the remote file.

    Tries FTPS first and falls back to plain FTP when TLS fails.
    Returns True.
    """
    if context is None:
        # Avoid the shared mutable-default-argument pitfall.
        context = {}
    for chain in self.browse(cr, uid, ids, context=context):
        config_obj = chain.ftp_config_id
        try:
            conn = FTP_TLS(host=config_obj.host, user=config_obj.username,
                           passwd=config_obj.passwd)
            conn.prot_p()
        except Exception:
            # TLS unavailable on this server: fall back to plain FTP.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            conn = FTP(host=config_obj.host, user=config_obj.username,
                       passwd=config_obj.passwd)
        filenames = conn.nlst()
        for filename in filenames:
            input_file = StringIO()
            # The bound `write` method is callback enough; no lambda needed.
            conn.retrbinary('RETR %s' % filename, input_file.write)
            input_string = input_file.getvalue()
            input_file.close()
            csv_reader = unicode_csv_reader(
                StringIO(input_string),
                delimiter=str(chain.separator),
                quoting=(not chain.delimiter and csv.QUOTE_NONE) or csv.QUOTE_MINIMAL,
                quotechar=chain.delimiter and str(chain.delimiter) or None,
                charset=chain.charset)
            self.import_to_db(cr, uid, ids, csv_reader=csv_reader, context=context)
            conn.delete(filename)
        conn.quit()
    return True
def download_npc(): ftp_files = [] os_files = [] try: ftps = FTP_TLS() ftps.connect(CFG_FTPS_HOST, CFG_FTPS_PORT) log_it( 'connected to ' + CFG_FTPS_HOST + ' welcome message: ' + str(ftps.getwelcome()), 'info') ftps.login(CFG_FTPS_USER, CFG_FTPS_PASS) ftps.prot_p() log_it('changing dir: ' + CFG_FTPS_DIR, 'info') ftps.cwd(CFG_FTPS_DIR) ftp_files = ftps.nlst() for f in ftp_files: if not os.path.isfile(CFG_ARCHIVE_DIR + f): ftps.retrbinary('RETR ' + f, open(CFG_ARCHIVE_DIR + f, 'wb').write) log_it('downloading file ' + f, 'info') else: log_it( 'skipping ' + f + ' as it already exists in ' + CFG_ARCHIVE_DIR, 'debug') except ftplib.all_errors, e: log_it('unable to connect to ' + CFG_FTPS_HOST + ' %s' % e, 'error')
def download_all_in_one_path(targetdir, resultdir, check=True, num=50):
    """Build the HTTPS download URL (and optionally the size) of every file
    under `targetdir` on the HEASARC mirror, then hand them to `down`.

    :param targetdir: Remote directory to list.
    :param resultdir: Local directory for downloads (created if missing).
    :param check: When True, fetch each file's size for verification.
    :param num: Thread count forwarded to `down` as `threadnum`.
    """
    if not os.path.exists(resultdir):
        os.makedirs(resultdir)
    # NOTE(review): plain ftplib.FTP has no prot_p(); this code relies on
    # `FTP` being bound to FTP_TLS (or similar) at import time -- confirm.
    ftp = FTP('129.164.179.23')
    ftp.login()
    ftp.prot_p()
    ftp.cwd(targetdir)
    files = ftp.nlst()
    target = 'https://heasarc.gsfc.nasa.gov/FTP' + targetdir
    sizes = None
    if check:
        sizes = []
    data1 = []
    ftp.voidcmd('TYPE I')  # binary mode, required for SIZE
    print('正在获取校验信息........')
    for i in files:
        data = os.path.join(target, i)
        print(data)
        data1.append(data)
        if check:
            sizes.append(ftp.size(i))
    ftp.quit()
    if not check:
        print('忽略数据大小校验。')
    print('正在校验...............')
    down(data1, resultdir, check=sizes, threadnum=num)
    print('\n任务下载完成!!!')
def _get_file_list(self, ftp: FTP_TLS) -> List[str]: files = [] try: files = ftp.nlst() return files except ftplib.error_perm as resp: if str(resp) == "550 No files found": print("No files in this directory") else: raise return files
def get_jobs_data():
    """Download every file from the 211 FTPS folder into 211_data/,
    skipping files that already exist locally. Returns 'Done'."""
    ftp = FTP_TLS('ontario.files.com', timeout=10)
    ftp.login(user=os.environ['211_username'], passwd=os.environ['211_password'])
    ftp.cwd('/211projects/BensTeam')
    ftp.prot_p()  # secure the data channel before any transfer
    files = ftp.nlst()
    for filename in files:
        local_path = '211_data/' + filename
        if not os.path.isfile(local_path):
            print(f"Getting file (unknown)")
            # Close the local file after the transfer (original leaked it).
            with open(local_path, 'wb') as fh:
                ftp.retrbinary("RETR " + filename, fh.write)
    ftp.quit()
    return 'Done'
def FTP_DOWNLOAD(self):
    """Connect to the company's FTP and download the zip folder within.

    Fetches every file in `directory` that starts with '987654321' and
    ends with '.zip' into `chksrc`. Returns True.
    """
    password = '******'
    directory = '/folder/'
    ftps = FTP_TLS('123.456.789.000')
    ftps.login(user='******', passwd=password)
    ftps.prot_p()
    ftps.cwd(directory)  # unused return value dropped (was `cur_dir`)
    new_files = ftps.nlst()
    print(ftps.pwd())
    for items in new_files:
        if items.startswith('987654321') and items.endswith('.zip'):
            print(items)
            local_file = os.path.join(chksrc, items)
            # Context manager guarantees the handle is closed even when the
            # transfer raises (the original left it open on error).
            with open(local_file, 'wb') as f:
                ftps.retrbinary('RETR %s' % items, f.write)
    return True
def getfiles(server, port, user, password, db): sqliteconnection = sqlite3.connect(db) sqlitecursor = sqliteconnection.cursor() sqlitecursor.execute('''CREATE TABLE IF NOT EXISTS latest (date int, CONSTRAINT 'id_UNIQUE' UNIQUE ('date'))''') sqliteconnection.commit() sqlitecursor.execute('''SELECT date FROM files WHERE date = (SELECT MAX(date) FROM files) LIMIT 1''') latestfile = sqlitecursor.fetchone() sqlitecursor.execute('''SELECT date FROM latest WHERE date = (SELECT MAX(date) FROM latest) LIMIT 1''') latestfetch = sqlitecursor.fetchone() if latestfetch is None: latestfetch = 0 if latestfetch < latestfile: ftpsconnection = FTP_TLS() ftpsconnection.connect(server, port) ftpsconnection.auth() ftpsconnection.prot_p() ftpsconnection.login(user, password) ftpsconnection.prot_p() sqlitecursor.execute('''SELECT name FROM files WHERE date > %d''' % latestfetch) filestofetch = sqlitecursor.fetchall() for currfile in filestofetch: ftpsconnection.cwd(currfile[0]) filenames = ftpsconnection.nlst() for filename in filenames: print 'Now saving /mnt/folder' + currfile[0] + '/' + filename localfile = open('/mnt/folder' + currfile + '/' + filename, 'wb') ftpsconnection.retrbinary('RETR ' + filename, localfile.write) localfile.close() sqliteconnection.execute('''INSERT OR IGNORE INTO latest VALUES (%d)''' % time.time()) sqliteconnection.commit() sqliteconnection.close() ftpsconnection.quit() ftpsconnection.close()
def connect(velkost_ftp,port): ftp=FTP_TLS(server,meno2,ps,port) ftp.prot_p() ftp.cwd(my_list[2]) print "Posielam subor. Cakajte prosim." obsah=open(file_to_send, 'rb') obsah.close() ftp.storbinary('STOR %s' % file_to_send, open(file_to_send, 'rb')) obsah.close() print "Subor odoslany [OK]" print "Obsah adresara na serveri:" ftp.retrlines("LIST") size_ftp=ftp.nlst() pocet=len(size_ftp) velkost_ftp_subor=size_ftp[pocet-1] #berie posledne pridany subor zo zoznamu ftp.sendcmd("TYPE i") velkost_ftp=ftp.size(velkost_ftp_subor) ftp.close() return velkost_ftp
def get_ftp_data(self, cr, uid, ids, context=None):
    """Fetch every CSV on each chain's configured FTP server, import it
    through `import_to_db`, then delete the remote file.

    Tries FTPS first and falls back to plain FTP when TLS fails.
    Returns True.
    """
    if context is None:
        # Avoid the shared mutable-default-argument pitfall.
        context = {}
    for chain in self.browse(cr, uid, ids, context=context):
        config_obj = chain.ftp_config_id
        try:
            conn = FTP_TLS(host=config_obj.host, user=config_obj.username,
                           passwd=config_obj.passwd)
            conn.prot_p()
        except Exception:
            # TLS unavailable on this server: fall back to plain FTP.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            conn = FTP(host=config_obj.host, user=config_obj.username,
                       passwd=config_obj.passwd)
        filenames = conn.nlst()
        for filename in filenames:
            input_file = StringIO()
            # The bound `write` method is callback enough; no lambda needed.
            conn.retrbinary('RETR %s' % filename, input_file.write)
            input_string = input_file.getvalue()
            input_file.close()
            csv_reader = unicode_csv_reader(
                StringIO(input_string),
                delimiter=str(chain.separator),
                quoting=(not chain.delimiter and csv.QUOTE_NONE) or csv.QUOTE_MINIMAL,
                quotechar=chain.delimiter and str(chain.delimiter) or None,
                charset=chain.charset)
            self.import_to_db(cr, uid, ids, csv_reader=csv_reader, context=context)
            conn.delete(filename)
        conn.quit()
    return True
def get_confidential_211_referrals():
    """Download all 211 'Referrals' CSV exports that are not yet on disk.

    The destination path per file is derived from its '-Created-' date
    suffix via `get_file_path`. Returns True.
    """
    item = {'classification': 'confidential', 'source_name': '211',
            'table_name': 'referrals', 'type': 'csv'}
    file_path, save_dir = get_file_path(item)
    Path(save_dir).mkdir(parents=True, exist_ok=True)
    ftp = FTP_TLS('ontario.files.com', timeout=10)
    ftp.login(user=os.environ['USERNAME_211'], passwd=os.environ['PASSWORD_211'])
    ftp.cwd('/211projects/BensTeam')
    ftp.prot_p()  # secure the data channel before any transfer
    files = ftp.nlst()
    for filename in files:
        names = filename.split('-')
        if 'Referrals' not in names:
            continue
        date = filename.split('-Created-')[-1]
        file_path, save_dir = get_file_path(item, date)
        if not os.path.isfile(file_path):
            print(f"Getting file (unknown)")
            # Close the local file after the transfer (original leaked it).
            with open(file_path, 'wb') as fh:
                ftp.retrbinary("RETR " + filename, fh.write)
    ftp.quit()
    return True
def generar_nombre_no_repetido(self, extension): ftps = FTP_TLS() ftps.connect('192.168.1.26') ftps.sendcmd('USER ftp_user') ftps.sendcmd('PASS ftp_user') existe = False nombre_generado = '' while (existe == False): files = [] nombre_generado = self.random_word() + '.' + extension #print nombre_generado try: files = ftps.nlst() if nombre_generado not in files: existe = True except ftplib.error_perm, resp: if str(resp) == "550 No files found": print "No files in this directory" else: raise
def exploreAndProcessFTPFolder(folderToExplore):
    """Recursively walk an INPI open-data FTP folder, downloading and
    processing every .zip file and ignoring .md5 checksum files."""
    ftp = FTP_TLS('opendata-rncs.inpi.fr')
    ftp.login(user='******', passwd='xxx')
    ftp.prot_p()
    elementList = ftp.nlst(folderToExplore)
    ftp.quit()
    for element in elementList:
        if element.endswith(".zip"):
            localFileName = os.path.basename(element)
            print("Downloading and processing file " + element)
            # Fresh connection per file: the listing connection was closed
            # above and downloads may be long-running.
            ftp = FTP_TLS('opendata-rncs.inpi.fr')
            ftp.login(user='******', passwd='xxx')
            ftp.prot_p()
            # BUG FIX: close (and flush) the local file BEFORE processing
            # it; the original handed a still-open, unflushed file to
            # processOneDayZippedFile and never closed the handle.
            with open(localFileName, 'wb') as localfile:
                ftp.retrbinary("RETR " + element, localfile.write)
            ftp.quit()
            processOneDayZippedFile(localFileName)
        elif element.endswith(".md5"):
            print("md5 file to ignore ^^ :" + element)
        else:
            print("exploring " + element)
            exploreAndProcessFTPFolder(element)
class ServerWatcher(Watcher):
    # Qt signals emitted while syncing with the FTP server.
    downloadProgress = Signal((int, int,))
    uploadProgress = Signal((int, int,))
    # Si added:
    textStatus = Signal((str,))
    fileEvent = Signal((str,))
    fileEventCompleted = Signal()
    loginCompleted = Signal((bool, str,))
    badFilenameFound = Signal((str,))

    LOCATION = 'server'
    TEST_FILE = 'iqbox.test'

    def __init__(self, host, ssl, parent=None):
        """
        Initializes parent class and attributes. Decides whether
        to use `FTP_TLS` or `FTP` based on the `ssl` param.

        :param host: Location of the FTP server
        :param ssl: Tells whether the FTP needs to support TLS or not
        :param parent: Reference to a `QObject` instance a parent
        """
        super(ServerWatcher, self).__init__(parent)
        self.interval = 5000  # checkout polling interval, milliseconds
        self.localdir = ''
        self.deleteQueue = []
        self.downloadQueue = []
        self.uploadQueue = []
        self.warnedNames = []
        self.ftp = None  # created lazily in `onLogin`
        self.useSSL = ssl
        self.host = host
        self.preemptiveCheck = False
        self.preemptiveActions = []
        self.testFile = 'iqbox.test'

    @property
    def currentdir(self):
        """Returns the current working directory at the server"""
        return self.ftp.pwd()

    def setLocalDir(self, localdir):
        """
        Sets the local directory used to stored all downloaded files.
        Creates the directory if needed.

        :param localdir: Absolute path to local directory
        """
        self.localdir = localdir
        if not os.path.exists(self.localdir):
            os.makedirs(self.localdir)

    @pause_timer
    @Slot()
    def checkout(self):
        """
        Recursively checks out all files on the server.
        Returns a dictionary of files on the server with their last modified date.

        :param download: Indicates whether or not the files should be downloaded
        """
        # Check `self.deleteQueue`, `self.uploadQueue` and `self.downloadQueue` queues.
        # These tasks are done in queues to make sure all FTP commands
        # are done sequentially, in the same thread.
        self.deleteAll()
        self.uploadAll()
        self.downloadAll()
        # Handy list to keep track of the checkout process.
        # This list contain absolute paths only.
        checked_dirs = list()
        # Sets '/' as initial directory and initializes `downloading_dir`
        self.ftp.cwd('/')
        downloading_dir = self.currentdir
        check_date = dt.utcnow()
        sidirlist = list()
        root_cached = False
        fileC = 0
        while True:
            # Gets the list of sub directories and files inside the
            # current directory `downloading_dir`.
            self.textStatus.emit('Remote scan- Downloading folder list of '+downloading_dir+'...')
            if root_cached and downloading_dir == '/':
                # Root listing was cached on the first pass; reuse it.
                dir_subdirs = saved_root_dirs
                dirfiles = saved_root_files
            else:
                dir_subdirs = self.getDirs(downloading_dir)
                if downloading_dir == '/':
                    saved_root_dirs = dir_subdirs
                # sidirlist.extend(dir_subdirs)
                self.textStatus.emit('Remote scan- Downloading files list of '+downloading_dir+'...')
                dirfiles = self.getFiles(downloading_dir)
                if downloading_dir == '/':
                    saved_root_files = dirfiles
                    root_cached = True
            # Leading '/' in `downloading_dir` breaks the `os.path.join` call
            localdir = os.path.join(self.localdir, downloading_dir[1:])
            if not os.path.exists(localdir):
                # Creates the directory if it doesn't already exists.
                os.makedirs(localdir)
            for file_ in dirfiles:
                # `serverpath` is the absolute path of the file on the server,
                # download it only if it hasn't been already downloaded
                serverpath = os.path.join(downloading_dir, file_)
                serverpath = QDir.fromNativeSeparators(serverpath)
                server_file = File.fromPath(serverpath)
                self.textStatus.emit('Scanning remote file... '+serverpath+'...')
                # How do we know if we should check this server file?
                # We see if the date last checked is the check start time.
                if server_file.last_checked_server != check_date:
                    # Do this process only once per file
                    # Added by Simon
                    # Give feedback on scanning of files.
                    fileC += 1
                    # NOTE(review): `fileC % 1 == 2` can never be true, so this
                    # progress message is dead code -- confirm intended modulus.
                    if fileC % 1 == 2:
                        self.textStatus.emit('Scanning remote files for changes, '+str(fileC)+' files scanned.')
                    # STEP: IS THIS THE FIRST TIME WE SAW THE FILE, OR WAS IT ALREADY IN OUR DB?
                    just_added = not server_file.inserver
                    # STEP: IF ITS A NEW FILE, ENSURE WE DONT WANT TO SKIP IT
                    # Example: If it's a temporary file, or a Unix file with a name we don't support.
                    if just_added:
                        filename = os.path.basename(serverpath)
                        if platform.system() == 'Windows':
                            # Names with these characters cannot exist on Windows;
                            # warn once per name and skip the file.
                            badName = False
                            for chr in ['\\', '/', ':', '?', '"', '<', '>', '|']:
                                if chr in filename:
                                    badName = True
                                    break
                            if badName:
                                if filename not in self.warnedNames:
                                    self.warnedNames.append(filename)
                                    self.badFilenameFound.emit(filename)
                                continue
                    # STEP: ASSUMING THE FILE DID EXIST IN OUR DB, LETS SAVE THE LAST MODIFICATION DATE
                    lastmdate = server_file.servermdate
                    # STEP: SAVE THE MOD DATE TO A VARIABLE
                    # Now we get the last mod time.
                    # We expect this to work fine since this file
                    # was found on the server
                    servermdate = self.lastModified(serverpath)
                    # STEP: SET BOOL SHOWING THAT IT WAS ON THE SERVER, SINCE WE KNOW IT IS.
                    server_file.inserver = True
                    # STEP: SET THE TIME THE FILE WAS LAST CHECKED TO THE SCAN START TIME
                    server_file.last_checked_server = check_date
                    # STEP: SET THE MOD DATE IN THE DATABASE TO THE ONE WE JUST GOT
                    server_file.servermdate = servermdate
                    # STEP: SAVE THIS CHANGE TO THE DATABASE
                    server_file.session.commit()
                    delta = 0
                    if server_file.inlocal:
                        delta = server_file.timeDiff()
                    # Emit the signals after the attributes has been set and committed
                    if just_added is True:
                        self.fileAdded.emit(ServerWatcher.LOCATION, serverpath)
                    elif server_file.servermdate > lastmdate or delta < -Watcher.TOLERANCE:
                        self.fileChanged.emit(ServerWatcher.LOCATION, serverpath, False)
            #END FOR
            self.textStatus.emit('Remote scan- Finding next folder...')
            dir_ready = True
            for dir_ in dir_subdirs:
                # `dirpath` is the absolute path of the subdirectory on the server,
                dirpath = QDir.fromNativeSeparators(os.path.join(downloading_dir, dir_))
                # `downloading_dir` is ready only when all its subdirectory are on the
                # `checked_dirs` list.
                if dirpath not in checked_dirs:
                    # Found one subdirectory that is not on `checked_dirs`,
                    # will process it in the next iteration.
                    downloading_dir = dirpath
                    dir_ready = False
                    break
            if dir_ready is True:
                # All subdirectories of `downloading_dir` are already in `checked_dirs`
                if downloading_dir == '/':
                    # All directories ready and at '/', means checkout is complete
                    # So, exit the main While loop!!
                    break
                else:
                    # Not at '/'. Current directory is ready so is appended to `checked_dirs`
                    # Back one directory to find directories that are not in `checked_dirs`
                    checked_dirs.append(downloading_dir)
                    downloading_dir = os.path.dirname(downloading_dir)
                    self.textStatus.emit('Remote scan- Found Folder...')
        ##### END OF WHILE ################
        ###################################################################
        # Deleted files are the ones whose `last_checked_server` attribute
        # didn't get updated in the recursive run.
        session = Session()
        deleted = session.query(File).filter(File.last_checked_server < check_date).filter(File.inserver == True)
        for file_ in deleted:
            self.fileDeleted.emit(ServerWatcher.LOCATION, file_.path)
        # Wraps up the checkout process, commits to the database.
        session.commit()

    @Slot()
    def onLogin(self, username, passwd):
        """Logs into the server (creating the FTP client on first use),
        then runs the write-permission and MFMT compatibility tests.
        Emits `loginCompleted(ok, msg)` with the outcome."""
        ok = True
        msg = ''
        error_msg = 'Login failed.'
        try:
            if not self.ftp:
                self.ftp = FTP_TLS(self.host) if self.useSSL is True else FTP(self.host)
            loginResponse = self.ftp.login(username, passwd)
        except socket.gaierror:
            # Host name could not be resolved; drop the client so the next
            # attempt recreates it.
            self.ftp = None
            ok = False
            msg = 'Server address could not be found.'
        except (error_perm, error_reply):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            ok = False
            msg = error_msg
        else:
            # FTP code 230 means user logged in.
            if '230' in loginResponse:
                ok = True
            else:
                ok = False
                msg = error_msg
        if ok:
            # Logged in. Now let's do compability tests.
            if not self.testPermissions():
                # User doesn't have write permissions, don't bother doing next test.
                ok = False
                msg = 'It seems like you do not have write access to this server.'
            else:
                # Permissions test passed, now let's test MFMT for timestamp modification.
                if not self.testMFMT():
                    ok = False
                    msg = 'This server does not support timestamp modification\n \
                           need by this application.'
        self.loginCompleted.emit(ok, msg)

    def getFiles(self, path):
        """
        This method simply wraps the `nlst` method with an exception handler,
        and returns an empty list in case an exception is caught.

        :param path: Relative or absolute path on the server
        """
        try:
            nlst = self.ftp.nlst(path)
            dirs = self.getDirs(path)
            # Files are items in nlst that are not in dirs
            files = [item for item in nlst if os.path.basename(item) not in dirs]
            return files
        except:
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    def getDirs(self, path):
        """
        Retrieves a list of the directories inside `path`,
        uses `retrlines` and the LIST command to retrieve the items.

        :param path: Relative or absolute path on the server
        """
        dirs = list()
        def handleLine(line):
            """
            Recieves a line from the LIST command.
            This function is meant to be used as callback for the `retrlines` method.

            :params line: Line from the LIST command
            """
            if line.startswith('d'):
                # Only lines starting with 'd' are directories
                # Parse the directory out of the line; lines look like:
                # 'drwxrwxrwx   1 user     group           0 Jun 15  2012 dirname'
                dirname = line[55:].strip()
                if dirname != '.' and dirname != '..':
                    # Ignoring '.' and '..' entries
                    dirs.append(dirname)
        try:
            self.ftp.retrlines('LIST %s' % path, handleLine)
            return dirs
        except:
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    @upload_test
    def testPermissions(self):
        # For interface purposes. upload_test takes care of everything.
        return True

    @upload_test
    def testMFMT(self):
        """Checks that the server honors timestamp modification; returns bool."""
        # Absurd date to test whether the change really happened.
        time = dt.utcfromtimestamp(100000000)
        try:
            self.setLastModified(self.testFile, time)
            otherTime = self.lastModified(self.testFile)
            diff = (time - otherTime).total_seconds()
            if abs(diff) < 2:
                # Let's give it a 2 seconds tolerance.
                mdtm = True
            else:
                mdtm = False
        except (ValueError, error_reply, error_perm):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            mdtm = False
        return mdtm

    @Slot(str)
    def onDelete(self, filename):
        # Queue the delete; executed sequentially in `checkout`.
        self.deleteQueue.append(filename)

    def deleteNext(self):
        # Pop and delete a single queued file, if any.
        if len(self.deleteQueue) > 0:
            next = self.deleteQueue.pop(0)
            self.deleteFile(next)

    def deleteAll(self):
        # Flush the whole delete queue.
        for filename in self.deleteQueue:
            self.deleteFile(filename)
        self.deleteQueue = []

    @Slot(str)
    def deleteFile(self, filename):
        """
        Deletes the file `filename` to the server

        :param filename: Absolute or relative path to the file
        """
        try:
            print 'Deleting %s' % filename
            self.ftp.delete(filename)
            return True
        except (error_reply, error_perm):
            print 'Error deleting %s' % filename
            return False
        # NOTE(review): unreachable -- both branches above return first.
        self.fileEventCompleted.emit()

    @Slot(str)
    def onDownload(self, filename):
        # Queue the download; executed sequentially in `checkout`.
        self.downloadQueue.append(filename)

    def downloadNext(self):
        # Pop and download a single queued file, if any.
        if len(self.downloadQueue) > 0:
            next = self.downloadQueue.pop(0)
            self.downloadFile(next)

    def downloadAll(self):
        # Flush the whole download queue.
        for filename in self.downloadQueue:
            self.downloadFile(filename)
        self.downloadQueue = []

    @Slot(str, str)
    def downloadFile(self, filename, localpath=None):
        """
        Performs a binary download to the file `filename` located on the server.
        `filename` parameter can be either absolute or relative, though it can
        fail for relative paths if the current directory is not appropiate.

        :param filename: Relative or absolute path to the file
        :param localpath: Absolute local path where the file will be saved
        """
        def handleChunk(chunk):
            """
            Receives chuncks of data downloaded from the server.
            This function is meant to be used as callback for the `retrbinary` method.

            :params chunk: Chunk of downloaded bytes to be written into the file
            """
            # Simply writes the received data into the file `self.downloading`
            self.downloading.write(chunk)
            self.download_progress += len(chunk)
            self.downloadProgress.emit(self.download_size, self.download_progress)

        if localpath is None:
            localpath = self.localFromServer(filename)
        localdir = os.path.dirname(localpath)
        if not os.path.exists(localdir):
            # Creates the directory if it doesn't already exists.
            os.makedirs(localdir)
        print 'Downloading: %s to %s' % (filename, localpath)
        try:
            with open(localpath, 'wb') as f:
                # Opens the file at `localname` which will hold the downloaded file.
                # Object attributes regarding download status are updated accordingly.
                self.fileEvent.emit(filename)
                self.downloading = f
                self.download_progress = 0
                self.download_size = int(self.ftp.sendcmd('SIZE %s' % filename).split(' ')[-1])
                self.ftp.retrbinary('RETR %s' % filename, handleChunk)
                print 'Download finished'
            # Let's set the same modified time to that on the server.
            with File.fromPath(filename) as downloadedfile:
                mdate = LocalWatcher.lastModified(localpath)
                downloadedfile.localmdate = mdate
                downloadedfile.servermdate = mdate
                self.setLastModified(filename, mdate)
            downloaded = True
        except (IOError, OSError):
            downloaded = False
            self.ioError.emit(localpath)
        except (error_reply, error_perm) as ftperr:
            print 'Error downloading %s, %s' % (filename, ftperr)
            downloaded = False
        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()
        return downloaded

    @Slot(str)
    def onUpload(self, filename):
        # Queue the upload; executed sequentially in `checkout`.
        self.uploadQueue.append(filename)

    def uploadNext(self):
        # Pop and upload a single queued file, if any.
        if len(self.uploadQueue) > 0:
            next = self.uploadQueue.pop(0)
            self.uploadFile(next)

    def uploadAll(self):
        # Flush the whole upload queue.
        for filename in self.uploadQueue:
            self.uploadFile(filename)
        self.uploadQueue = []

    @Slot(str)
    def uploadFile(self, filename):
        """
        Uploads the file `filename` to the server, creating
        the needed directories.

        :param filename: Absolute or relative path to the file
        """
        def handle(buf):
            """This function is meant to be used as callback for the `storbinary` method."""
            self.upload_progress += 1024
            self.uploadProgress.emit(self.upload_size, self.upload_progress)

        # Creates the directory where the file will be uploaded to
        self.mkpath(os.path.dirname(filename))
        localpath = self.localFromServer(filename)
        print 'Uploading %s to %s' % (localpath, filename)
        try:
            # Uploads file and updates its modified date in the server
            # to match the date in the local filesystem.
            self.upload_progress = 0
            self.upload_size = os.path.getsize(localpath)
            self.fileEvent.emit(localpath)
            self.ftp.storbinary('STOR %s' % filename, open(localpath, 'rb'), 1024, handle)
            print 'Upload finished'
            with File.fromPath(filename) as uploaded:
                modified = uploaded.localmdate
                uploaded.servermdate = modified
                self.setLastModified(filename, modified)
            uploaded = True
        except (IOError, OSError):
            uploaded = False
            self.ioError.emit(localpath)
        except (error_reply, error_perm, OSError) as err:
            print 'Error uploading %s, %s' % (filename, err)
            uploaded = False
        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()
        return uploaded

    def lastModified(self, filename):
        """
        Uses the MDTM FTP command to find the last modified timestamp
        of the file `filename`.
        Returns a `datetime.datetime` object in UTC representing the file's
        last modified date and time.

        :param filename: Relative or absolute path to the file
        """
        timestamp = self.ftp.sendcmd('MDTM %s' % filename)
        if '213 ' not in timestamp:
            # Second chance was found to be needed in some cases.
            timestamp = self.ftp.sendcmd('MDTM %s' % filename)
        timestamp = timestamp.split(' ')[-1]
        dateformat = '%Y%m%d%H%M%S.%f' if '.' in timestamp else '%Y%m%d%H%M%S'
        try:
            mtime = dt.strptime(timestamp, dateformat)
        except ValueError:
            # Unparseable server reply; fall back to "now" in UTC.
            mtime = dt.utcnow()
        return mtime

    def setLastModified(self, serverpath, newtime):
        """
        Uses the MFMT or MDTM FTP commands to set `newtime`
        as the modified timestamp of the file `serverpath` on the server.

        :param serverpath: Relative or absolute path to the file
        :param newtime: datedatime object holding the required time
        """
        cmds = ['MFMT', 'MDTM']
        for cmd in cmds:
            try:
                self.ftp.sendcmd(
                    '%s %s %s' % (cmd, newtime.strftime('%Y%m%d%H%M%S'), serverpath))
                return
            except (error_perm, error_reply) as e:
                if cmd == cmds[len(cmds) - 1]:
                    # If is the last comand, re-raise the exception, else
                    # keep trying.
                    raise e
                else:
                    continue

    def mkpath(self, path):
        """
        Creates the path `path` on the server by recursively
        created folders, if needed.

        :param path: Absolute path on the server to be created
        """
        try:
            self.ftp.cwd(path)
        except error_perm:
            # `cwd` call failed. Need to create some folders
            make_dir = '/'
            steps = path.split('/')
            for step in steps:
                if len(step) == 0:
                    continue
                make_dir += '%s/' % step
                try:
                    self.ftp.mkd(make_dir)
                except error_perm:
                    # Probably already exists
                    continue
        else:
            # `cwd` call succeed. No need to create
            # any folders
            self.ftp.cwd('/')
            return

    @Slot(str, str)
    def added(self, location, serverpath):
        """Handles a file-added event; in preemptive-check mode it records the
        FileAction (upload/download) needed to reconcile both sides."""
        super(ServerWatcher, self).added(location, serverpath)

        def actionFromPath(serverpath):
            # Decide upload vs download from the local/server mtime difference.
            f = File()
            fileExistsOnServer = True
            try:
                f.servermdate = self.lastModified(serverpath)
            except error_perm:
                fileExistsOnServer = False
                f.servermdate = 0
            f.localmdate = LocalWatcher.lastModified(self.localFromServer(serverpath))
            diff = f.timeDiff()
            action = None
            if abs(diff) > Watcher.TOLERANCE:
                if not fileExistsOnServer or diff > 0:
                    action = FileAction(serverpath, FileAction.UPLOAD, ServerWatcher.LOCATION)
                else:
                    action = FileAction(serverpath, FileAction.DOWNLOAD, LocalWatcher.LOCATION)
            return action

        if self.preemptiveCheck:
            if location == ServerWatcher.LOCATION:
                localpath = self.localFromServer(serverpath)
                if not os.path.exists(localpath):
                    action = FileAction(serverpath, FileAction.DOWNLOAD, ServerWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)
            elif location == LocalWatcher.LOCATION:
                # SIZE succeeding is used as an existence probe on the server.
                try:
                    self.ftp.sendcmd('SIZE %s' % serverpath)
                except (error_reply, error_perm):
                    exists = False
                else:
                    exists = True
                if not exists:
                    action = FileAction(serverpath, FileAction.UPLOAD, LocalWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)

    @Slot(str, str)
    def changed(self, location, serverpath):
        super(ServerWatcher, self).changed(location, serverpath)

    @Slot(str, str)
    def deleted(self, location, serverpath):
        # Mark the file as gone from the server in the database.
        super(ServerWatcher, self).deleted(location, serverpath)
        with File.fromPath(serverpath) as deleted:
            deleted.inserver = False
from ftplib import FTP_TLS
import os

# Explicit-TLS FTP demo: connect, secure the channel, then download and
# upload one file each way.
ftps = FTP_TLS(timeout=100)
ftps.set_debuglevel(1)
ftps.connect("192.168.1.35", 21)
ftps.auth()
ftps.prot_p()
ftps.login('pan', '1')
print(ftps.getwelcome())
print(ftps.pwd())
ftps.dir()

# Download a file
os.chdir(r'D:\Desktop\FTP_TLS\FTP\00临时存储')
ftps.cwd('/')
ftps.nlst()  # list the files in the directory
filename = 'vsftpd.key'
# Close the local handle after the transfer (the original leaked it).
with open(filename, "wb") as local_fh:
    ftps.retrbinary('RETR %s' % os.path.basename(filename), local_fh.write, blocksize=1024)

# Upload to the server
bufsize = 1024
localpath = 'D:\\Desktop\\FTP_TLS\FTP\\00临时存储\\test.txt'
remotepath = '/test.txt'
# Close the local handle after the transfer (the original leaked it).
with open(localpath, 'rb') as fp:
    ftps.storbinary('STOR ' + remotepath, fp, bufsize)

# Print TLS details
print(ftps.context)
print(ftps.certfile)
processSuccess = False if useNetworkShare: # delete after time limit try: onlyfiles = [f for f in listdir(netShare) if isfile(join(netShare, f))] deleteFiles(onlyfiles,'network') except Exception as e: applogger.debug('Error with network file delete: %s', e) processSuccess = False if useFTP: # delete after time limit try: ftps = FTP_TLS(ftphost) ftps.login(ftpuser, ftppass) ftps.prot_p() files = ftps.nlst() deleteFiles(files,'ftp') except Exception as e: applogger.debug('Error with directory listing: %s', e) processSuccess = False finally: ftps.quit() def deletedErrorMessage(count,delType): applogger.info('Removed ' + str(count) + ' ' + delType + ' Backups') if count_deleted_local > 0: deletedErrorMessage(count_deleted_local,'local') if count_deleted_network > 0: deletedErrorMessage(count_deleted_network,'network') if count_deleted_ftp > 0:
def load_date(self, date_str):
    """Load the Wind/SWE Faraday-cup ion spectra for the day given by
    ``date_str`` (format "YYYY-MM-DD").

    Looks for a cached CDF under ``self.path``; if absent, downloads it
    from the SPDF FTPS server.  On success the CDF, the date tag, and
    one ``fc_tag`` per spectrum are appended to the instance arrays.
    All failures are reported through ``self.mesg_txt`` and abort.
    """
    # Abort if the requested date has already been loaded.
    arr = [date for date in self.arr_date if date == date_str]
    if len(arr) > 0:
        return

    # Extract the year, month, and day portions of "date_str".
    str_year = date_str[0:4]
    str_mon = date_str[5:7]
    str_day = date_str[8:10]

    # Name (glob pattern) of the file holding data for this date.
    fl0 = ('wi_sw-ion-dist_swe-faraday_' +
           str_year + str_mon + str_day + '_v??.cdf')
    fl0_path = os.path.join(self.path, fl0)
    gb = glob(fl0_path)  # all locally cached versions of this file

    # If the file does not exist locally, attempt to download it.
    if len(gb) > 0:
        # Take the highest version: gb[-1]
        fl_path = gb[-1]
    else:
        try:
            self.mesg_txt('ftp', date_str)
            ftp = FTP_TLS('spdf.gsfc.nasa.gov')
            ftp.login()
            ftp.cwd('pub/data/wind/swe/swe_faraday/')
            ftp.cwd(str_year)
            ls = ftp.nlst(fl0)
            fl = ls[-1]
            fl_path = os.path.join(self.path, fl)
            # Context manager so the local file is closed even on error
            # (the original leaked the handle from open(...).write).
            with open(fl_path, 'wb') as f:
                ftp.retrbinary("RETR " + fl, f.write)
            # Disconnect cleanly (previously the session was abandoned).
            ftp.quit()
        except Exception:
            # Narrowed from a bare except: don't swallow SystemExit etc.
            self.mesg_txt('fail', date_str)
            return

    # If the file now exists, try to load it; otherwise, abort.
    self.mesg_txt('load', date_str)
    if os.path.isfile(fl_path):
        try:
            cdf = pycdf.CDF(fl_path)
        except Exception:
            self.mesg_txt('fail', date_str)
            return
    else:
        self.mesg_txt('fail', date_str)
        return

    # Add the CDF object and tags for each spectrum to the arrays.
    c = len(self.arr_cdf)
    self.arr_cdf = self.arr_cdf + [cdf]        # arr_cdf and
    self.arr_date = self.arr_date + [date_str] # arr_date stay same size
    n_spec = len(cdf['Epoch'])
    self.arr_tag = self.arr_tag + [fc_tag(c=c, s=s, epoch=cdf['Epoch'][s])
                                   for s in range(n_spec)]
    # Keep the tags ordered chronologically across all loaded files.
    self.arr_tag = sorted(self.arr_tag, key=attrgetter('epoch'))
CSV2 = '_Inverter2.csv' CSV3 = '_Inverter3.csv' CSV4 = '_Inverter4.csv' CSV = [CSV1, CSV2, CSV3, CSV4] foldername = str(datetime.date.today()) PATH_TO_SAVE_CVSDIRECTORIES = '/root/regs/' client1 = mqtt.Client() try: ftps = FTP_TLS('host') ftps.login('user', 'password') # login anonymously before securing control channel ftps.prot_p() # switch to secure data connection ftps.cwd('dir') # execute a cd command to access to directory if (not (foldername in ftps.nlst())): ftps.mkd(foldername) #create a new folder with current date ftps.cwd(str(datetime.date.today())) #access to current date folder for i in range(0, len(CSV)): filename = str(PATH_TO_SAVE_CVSDIRECTORIES + str(datetime.date.today()) + '/' + str(datetime.date.today()) + CSV[i]) file = open(filename, 'rb') filename2 = str(datetime.date.today()) + CSV[i] ftps.storbinary('STOR ' + filename2, file) file.close() ftps.quit() print("End try") except:
def genImage(task, bound):
    """Render a PNG map from Distant Horizons region files (*.2dr / *.3dr).

    Scans either a local folder or an FTP(S) server for region files,
    optionally skipping 2D regions that have no 3D data, then paints one
    pixel (or a ``scale`` x ``scale`` square) per region into a temporary
    GTiff and translates it to PNG.

    :param task: QGIS task object used for progress reporting
    :param bound: mutable bounds object (minX / minZ / maxX / maxZ)
    :return: the start timestamp on success, ``None`` on any failure
    """
    time_start = datetime.now()
    try:
        regions = []
        regions3d = []
        if not ftp_scan:
            # --- Local folder scan ---
            if skip_empty_regions:
                raw_files = os.listdir(region3d_folder)
                for raw_file in raw_files:
                    if raw_file.endswith(".3dr"):
                        fl = os.path.join(region3d_folder, raw_file).replace("\\", "/")
                        fileinfo = QFileInfo(fl)
                        filename = fileinfo.completeBaseName()
                        m = IS_REGION3D_FILENAME.match(filename)
                        if m:
                            x = int(m.group(1))
                            z = int(m.group(3))
                            y = int(m.group(2))
                            if not scan_allfiles:
                                # BUGFIX: was `x < minX and x > maxX and z < minZ ...`,
                                # which is always False (x cannot be below min AND above
                                # max) and referenced the undefined names minZ/maxZ.
                                # Skip regions that lie outside the requested bounds.
                                if (x < bound.minX or x > bound.maxX or
                                        z < bound.minZ or z > bound.maxZ):
                                    continue
                            regions3d.append(Region3D(x, z, y))
            raw_files = os.listdir(region2d_folder)
            for raw_file in raw_files:
                if raw_file.endswith(".2dr"):
                    fl = os.path.join(region2d_folder, raw_file).replace("\\", "/")
                    fileinfo = QFileInfo(fl)
                    filename = fileinfo.completeBaseName()
                    m = IS_REGION_FILENAME.match(filename)
                    if m:
                        x = int(m.group(1))
                        z = int(m.group(2))
                        if not scan_allfiles:
                            # BUGFIX: same dead-filter fix as above.
                            if (x < bound.minX or x > bound.maxX or
                                    z < bound.minZ or z > bound.maxZ):
                                continue
                        if skip_empty_regions:
                            # A 2D region is kept only if at least one 3D region
                            # maps onto it (3D coordinates are halved).
                            reg3d = list(filter(lambda f: f.x >> 1 == x and f.z >> 1 == z, regions3d))
                            if reg3d is not None and any(reg3d):
                                regions.append(Region(x, z, reg3d))
                        else:
                            regions.append(Region(x, z, None))
        else:
            # --- FTP scan ---
            try:
                ftp = FTP_TLS() if ftp_s else FTP()
                ftp.connect(ftp_url, ftp_port)
                if ftp_user is None or ftp_password is None:
                    ftp.login()  # anonymous login
                else:
                    ftp.login(user=ftp_user, passwd=ftp_password)
                if ftp_save_folder is not None:
                    try:
                        ftp.cwd(ftp_save_folder)
                        if skip_empty_regions:
                            ftp.cwd('region3d')
                            remote3d_files = ftp.nlst()
                            for rf in remote3d_files:
                                m = IS_REGION3D_FILE.match(rf)
                                if m:
                                    x = int(m.group(1))
                                    z = int(m.group(3))
                                    y = int(m.group(2))
                                    if not scan_allfiles:
                                        # BUGFIX: same dead-filter fix as above.
                                        if (x < bound.minX or x > bound.maxX or
                                                z < bound.minZ or z > bound.maxZ):
                                            continue
                                    regions3d.append(Region3D(x, z, y))
                            ftp.cwd('../')
                        ftp.cwd('region2d')
                        remote_files = ftp.nlst()
                        for rf in remote_files:
                            m = IS_REGION_FILE.match(rf)
                            if m:
                                x = int(m.group(1))
                                z = int(m.group(2))
                                if not scan_allfiles:
                                    # BUGFIX: same dead-filter fix as above.
                                    if (x < bound.minX or x > bound.maxX or
                                            z < bound.minZ or z > bound.maxZ):
                                        continue
                                if skip_empty_regions:
                                    reg3d = list(filter(lambda f: f.x >> 1 == x and f.z >> 1 == z, regions3d))
                                    if reg3d is not None and any(reg3d):
                                        regions.append(Region(x, z, reg3d))
                                else:
                                    regions.append(Region(x, z, None))
                    except Exception:
                        # Narrowed from bare except.
                        QgsMessageLog.logMessage('Error: Path does not exist on FTP server', CATEGORY, Qgis.Info)
                        return None
                ftp.quit()
            except Exception as e:
                QgsMessageLog.logMessage('No 2dr files found or wrong ftp options. Error: ' + str(e), CATEGORY, Qgis.Info)
                return None

        if scan_allfiles:
            if len(regions) > 0:
                # Derive the bounds from everything that was found.
                bound.minX = min(regions, key=lambda e: e.x).x
                bound.minZ = min(regions, key=lambda e: e.z).z
                bound.maxX = max(regions, key=lambda e: e.x).x
                bound.maxZ = max(regions, key=lambda e: e.z).z
                QgsMessageLog.logMessage('Bounds set to Top({top_x}, {top_z}), Bottom({bottom_x}, {bottom_z})'.format(top_x=str(bound.minX), top_z=str(bound.minZ), bottom_x=str(bound.maxX), bottom_z=str(bound.maxZ)), CATEGORY, Qgis.Info)
            else:
                QgsMessageLog.logMessage('No 2dr files found', CATEGORY, Qgis.Info)
                return None
        if len(regions) == 0:
            QgsMessageLog.logMessage('No 2dr files found', CATEGORY, Qgis.Info)
            return None
        else:
            QgsMessageLog.logMessage('Creating image out of {count} 2dr files'.format(count=str(len(regions))), CATEGORY, Qgis.Info)

        range_width = abs(bound.maxX - bound.minX) + 1
        range_height = abs(bound.minZ - bound.maxZ) + 1
        width = int(round(range_width * scale))
        height = int(round(range_height * scale))
        temp_file = output_file + ".tif"
        red = np.zeros(shape=(1, width), dtype=np.byte)
        green = np.zeros(shape=(1, width), dtype=np.byte)
        blue = np.zeros(shape=(1, width), dtype=np.byte)
        alpha = np.zeros(shape=(1, width), dtype=np.byte)
        regions.sort(key=lambda ee: ee.z)
        out_driver = gdal.GetDriverByName('GTiff')
        dst_src = out_driver.Create(temp_file, width, height, 4, gdal.GDT_Byte)
        cf = height
        rt = 0
        # Write empty (transparent) rows; first 50% of the progress bar.
        for tty in range(0, height):
            writeRow(dst_src, red, green, blue, alpha, tty)
            rt += 1
            task.setProgress(max(0, min(int(((rt * 50) / cf)), 100)))

        rc = 0
        cc = len(regions)
        y = None
        t_width = width - (1 * scale)
        t_height = height - (1 * scale)
        t_range_width = range_width - 1
        t_range_height = (range_height - 1)
        for reg in regions:
            coord_x = reg.x - bound.minX
            coord_y = reg.z - bound.minZ
            x = max(0, min(int(round((coord_x * t_width) / t_range_width)), width - 1))
            y = max(0, min(int(round((coord_y * t_height) / t_range_height)), height - 1))
            if scale <= 1.0:
                # One pixel per region: read the target row, set the pixel,
                # write the row back.
                red = dst_src.GetRasterBand(1).ReadAsArray(yoff=y, win_xsize=width, win_ysize=1)
                green = dst_src.GetRasterBand(2).ReadAsArray(yoff=y, win_xsize=width, win_ysize=1)
                blue = dst_src.GetRasterBand(3).ReadAsArray(yoff=y, win_xsize=width, win_ysize=1)
                alpha = dst_src.GetRasterBand(4).ReadAsArray(yoff=y, win_xsize=width, win_ysize=1)
                red[0][x] = pixel_color[0]
                green[0][x] = pixel_color[1]
                # BUGFIX: blue was assigned pixel_color[0] (the red component);
                # use the blue component, as the scale>1 branch below does.
                blue[0][x] = pixel_color[2]
                alpha[0][x] = 255
                writeRow(dst_src, red, green, blue, alpha, y)
            else:
                # Paint a scale x scale block per region, reflecting back
                # inside the image when the offset would run past the edge.
                for yt in range(int(scale)):
                    yt_off = y + yt
                    if yt_off > height - 1:
                        yt_off = y - yt
                    try:
                        red = dst_src.GetRasterBand(1).ReadAsArray(yoff=yt_off, win_xsize=width, win_ysize=1)
                        green = dst_src.GetRasterBand(2).ReadAsArray(yoff=yt_off, win_xsize=width, win_ysize=1)
                        blue = dst_src.GetRasterBand(3).ReadAsArray(yoff=yt_off, win_xsize=width, win_ysize=1)
                        alpha = dst_src.GetRasterBand(4).ReadAsArray(yoff=yt_off, win_xsize=width, win_ysize=1)
                        for xt in range(int(scale)):
                            xt_off = x + xt
                            if xt_off > width - 1:
                                xt_off = x - xt
                            red[0][xt_off] = pixel_color[0]
                            green[0][xt_off] = pixel_color[1]
                            blue[0][xt_off] = pixel_color[2]
                            alpha[0][xt_off] = 255
                        writeRow(dst_src, red, green, blue, alpha, yt_off)
                    except Exception as e:
                        QgsMessageLog.logMessage('Error while filling pixel: {error}, Y off {ytoff}'.format(error=str(e), ytoff=yt_off), CATEGORY, Qgis.Info)
                        return None
            rc += 1
            task.setProgress(max(0, min(int((rc * 50) / cc) + 50, 100)))

        # Save copy
        QgsMessageLog.logMessage('Creating a PNG copy. This might take a while', CATEGORY, Qgis.Info)
        # gdal.GetDriverByName('PNG').CreateCopy(output_file, clip_ras)
        gdal.Translate(output_file, dst_src, options=["ZLEVEL=9"], format="PNG")
        dst_src = None  # release the GTiff dataset before deleting the file
        os.remove(temp_file)
        sleep(0.05)
        return time_start
    except Exception as e:
        QgsMessageLog.logMessage('Error: ' + str(e), CATEGORY, Qgis.Info)
        return None
lg.info('Send wordpress backup folder to FTP server')
try:
    # Explicit-TLS FTP: secure the control channel, then the data
    # channel, and enable passive mode before logging in.
    ftps = FTP_TLS(timeout=100)
    ftps.connect(ftp_ip, ftp_port)
    ftps.auth()
    ftps.prot_p()
    ftps.set_pasv(True)
    ftps.login(ftp_login, ftp_password)
    print('Connect to FTP ...')
    print(ftps.getwelcome())
except ftplib.error_perm as e:
    # NOTE(review): execution continues with an unusable `ftps` after a
    # login failure; consider aborting here — kept as-is for now.
    print('Ftp fail -> ', e)

# List the remote directory ONCE and reuse the result.  The original
# issued three separate NLST round-trips, the first one discarded.
files_list = ftps.nlst()
lg.info("List of FTP remote folders:")
for filename in files_list:
    # BUGFIX: the loop previously logged a constant placeholder instead
    # of the entry being listed.
    lg.debug(f'{filename} ')

if folder in files_list:
    # 'wordpress-todayDate' already exists inside the remote home dir.
    ftps.cwd(folder)  # change into "wordpress-todayDate" directory
    lg.debug(f'{folder} ')
else:
    # Create a new directory called "wordpress-todayDate" on the server.
    ftps.mkd(folder)
    lg.debug(f'{folder} 2')
    ftps.cwd(folder)
class ServerWatcher(Watcher): downloadProgress = Signal(( int, int, )) uploadProgress = Signal(( int, int, )) # Si added: textStatus = Signal((str, )) fileEvent = Signal((str, )) fileEventCompleted = Signal() loginCompleted = Signal(( bool, str, )) badFilenameFound = Signal((str, )) LOCATION = 'server' TEST_FILE = 'iqbox.test' def __init__(self, host, ssl, parent=None): """ Initializes parent class and attributes. Decides whether to use `FTP_TLS` or `FTP` based on the `ssl` param. :param host: Location of the FTP server :param ssl: Tells whether the FTP needs to support TLS or not :param parent: Reference to a `QObject` instance a parent """ super(ServerWatcher, self).__init__(parent) self.interval = 5000 self.localdir = '' self.deleteQueue = [] self.downloadQueue = [] self.uploadQueue = [] self.warnedNames = [] self.ftp = None self.useSSL = ssl self.host = host self.preemptiveCheck = False self.preemptiveActions = [] self.testFile = 'iqbox.test' @property def currentdir(self): """Returns the current working directory at the server""" return self.ftp.pwd() def setLocalDir(self, localdir): """ Sets the local directory used to stored all downloaded files. Creates the directory if needed. :param localdir: Absolute path to local directory """ self.localdir = localdir if not os.path.exists(self.localdir): os.makedirs(self.localdir) @pause_timer @Slot() def checkout(self): """ Recursively checks out all files on the server. Returns a dictionary of files on the server with their last modified date. :param download: Indicates whether or not the files should be downloaded """ # Check `self.deleteQueue`, `self.uploadQueue` and `self.downloadQueue` queues. # These tasks are done in queues to make sure all FTP commands # are done sequentially, in the same thread. self.deleteAll() self.uploadAll() self.downloadAll() # Handy list to keep track of the checkout process. # This list contain absolute paths only. 
checked_dirs = list() # Sets '/' as initial directory and initializes `downloading_dir` self.ftp.cwd('/') downloading_dir = self.currentdir check_date = dt.utcnow() sidirlist = list() root_cached = False fileC = 0 while True: # Gets the list of sub directories and files inside the # current directory `downloading_dir`. self.textStatus.emit('Remote scan- Downloading folder list of ' + downloading_dir + '...') if root_cached and downloading_dir == '/': dir_subdirs = saved_root_dirs dirfiles = saved_root_files else: dir_subdirs = self.getDirs(downloading_dir) if downloading_dir == '/': saved_root_dirs = dir_subdirs # sidirlist.extend(dir_subdirs) self.textStatus.emit( 'Remote scan- Downloading files list of ' + downloading_dir + '...') dirfiles = self.getFiles(downloading_dir) if downloading_dir == '/': saved_root_files = dirfiles root_cached = True # Leading '/' in `downloading_dir` breaks the `os.path.join` call localdir = os.path.join(self.localdir, downloading_dir[1:]) if not os.path.exists(localdir): # Creates the directory if it doesn't already exists. os.makedirs(localdir) for file_ in dirfiles: # `serverpath` is the absolute path of the file on the server, # download it only if it hasn't been already downloaded serverpath = os.path.join(downloading_dir, file_) serverpath = QDir.fromNativeSeparators(serverpath) server_file = File.fromPath(serverpath) self.textStatus.emit('Scanning remote file... ' + serverpath + '...') # How do we know if we should check this server file? # We see if the date last checked is the check start time. if server_file.last_checked_server != check_date: # Do this process only once per file # Added by Simon # Give feedback on scanning of files. fileC += 1 if fileC % 1 == 2: self.textStatus.emit( 'Scanning remote files for changes, ' + str(fileC) + ' files scanned.') # STEP: IS THIS THE FIRST TIME WE SAW THE FILE, OR WAS IT ALREADY IN OUR DB? 
just_added = not server_file.inserver # STEP: IF ITS A NEW FILE, ENSURE WE DONT WANT TO SKIP IT # Example: If it's a temporary file, or a Unix file with a name we don't support. if just_added: filename = os.path.basename(serverpath) if platform.system() == 'Windows': badName = False for chr in [ '\\', '/', ':', '?', '"', '<', '>', '|' ]: if chr in filename: badName = True break if badName: if filename not in self.warnedNames: self.warnedNames.append(filename) self.badFilenameFound.emit(filename) continue # STEP: ASSUMING THE FILE DID EXIST IN OUR DB, LETS SAVE THE LAST MODIFICATION DATE lastmdate = server_file.servermdate # STEP: SAVE THE MOD DATE TO A VARIABLE # Now we get the last mod time. # We expect this to work fine since this file # was found on the server servermdate = self.lastModified(serverpath) # STEP: SET BOOL SHOWING THAT IT WAS ON THE SERVER, SINCE WE KNOW IT IS. server_file.inserver = True # STEP: SET THE TIME THE FILE WAS LAST CHECKED TO THE SCAN START TIME server_file.last_checked_server = check_date # STEP: SET THE MOD DATE IN THE DATABASE TO THE ONE WE JUST GOT server_file.servermdate = servermdate # STEP: SAVE THIS CHANGE TO THE DATABASE server_file.session.commit() delta = 0 if server_file.inlocal: delta = server_file.timeDiff() # Emit the signals after the attributes has been set and committed if just_added is True: self.fileAdded.emit(ServerWatcher.LOCATION, serverpath) elif server_file.servermdate > lastmdate or delta < -Watcher.TOLERANCE: self.fileChanged.emit(ServerWatcher.LOCATION, serverpath, False) #END FOR self.textStatus.emit('Remote scan- Finding next folder...') dir_ready = True for dir_ in dir_subdirs: # `dirpath` is the absolute path of the subdirectory on the server, dirpath = QDir.fromNativeSeparators( os.path.join(downloading_dir, dir_)) # `downloading_dir` is ready only when all its subdirectory are on the # `checked_dirs` list. 
if dirpath not in checked_dirs: # Found one subdirectory that is not on `checked_dirs`, # will process it in the next iteration. downloading_dir = dirpath dir_ready = False break if dir_ready is True: # All subdirectories of `downloading_dir` are already in `checked_dirs` if downloading_dir == '/': # All directories ready and at '/', means checkout is complete # So, exit the main While loop!! break else: # Not at '/'. Current directory is ready so is appended to `checked_dirs` # Back one directory to find directories that are not in `checked_dirs` checked_dirs.append(downloading_dir) downloading_dir = os.path.dirname(downloading_dir) self.textStatus.emit('Remote scan- Found Folder...') ##### END OF WHILE ################ ################################################################### # Deleted files are the ones whose `last_checked_server` attribute # didn't get updated in the recursive run. session = Session() deleted = session.query(File).filter( File.last_checked_server < check_date).filter( File.inserver == True) for file_ in deleted: self.fileDeleted.emit(ServerWatcher.LOCATION, file_.path) # Wraps up the checkout process, commits to the database. session.commit() @Slot() def onLogin(self, username, passwd): ok = True msg = '' error_msg = 'Login failed.' try: if not self.ftp: self.ftp = FTP_TLS(self.host) if self.useSSL is True else FTP( self.host) loginResponse = self.ftp.login(username, passwd) except socket.gaierror: self.ftp = None ok = False msg = 'Server address could not be found.' except (error_perm, error_reply): info = traceback.format_exception(*sys.exc_info()) for i in info: sys.stderr.write(i) ok = False msg = error_msg else: if '230' in loginResponse: ok = True else: ok = False msg = error_msg if ok: # Logged in. Now let's do compability tests. if not self.testPermissions(): # User doesn't have write permissions, don't bother doing next test. ok = False msg = 'It seems like you do not have write access to this server.' 
else: # Permissions test passed, now let's test MFMT for timestamp modification. if not self.testMFMT(): ok = False msg = 'This server does not support timestamp modification\n \ need by this application.' self.loginCompleted.emit(ok, msg) def getFiles(self, path): """ This method simply wraps the `nlst` method with an exception handler, and returns an empty list in case an exception is caught. :param path: Relative or absolute path on the server """ try: nlst = self.ftp.nlst(path) dirs = self.getDirs(path) # Files are items in nlst that are not in dirs files = [ item for item in nlst if os.path.basename(item) not in dirs ] return files except: print 'Exception in ServerWatcher.getDirs' info = traceback.format_exception(*sys.exc_info()) for i in info: sys.stderr.write(i) return [] def getDirs(self, path): """ Retrieves a list of the directories inside `path`, uses `retrlines` and the LIST command to retrieve the items. :param path: Relative or absolute path on the server """ dirs = list() def handleLine(line): """ Recieves a line from the LIST command. This function is meant to be used as callback for the `retrlines` method. :params line: Line from the LIST command """ if line.startswith('d'): # Only lines starting with 'd' are directories # Parse the directory out of the line; lines look like: # 'drwxrwxrwx 1 user group 0 Jun 15 2012 dirname' dirname = line[55:].strip() if dirname != '.' and dirname != '..': # Ignoring '.' and '..' entries dirs.append(dirname) try: self.ftp.retrlines('LIST %s' % path, handleLine) return dirs except: print 'Exception in ServerWatcher.getDirs' info = traceback.format_exception(*sys.exc_info()) for i in info: sys.stderr.write(i) return [] @upload_test def testPermissions(self): # For interface purposes. upload_test takes care of everything. return True @upload_test def testMFMT(self): # Absurd date to test whether the change really happened. 
time = dt.utcfromtimestamp(100000000) try: self.setLastModified(self.testFile, time) otherTime = self.lastModified(self.testFile) diff = (time - otherTime).total_seconds() if abs(diff) < 2: # Let's give it a 2 seconds tolerance. mdtm = True else: mdtm = False except (ValueError, error_reply, error_perm): info = traceback.format_exception(*sys.exc_info()) for i in info: sys.stderr.write(i) mdtm = False return mdtm @Slot(str) def onDelete(self, filename): self.deleteQueue.append(filename) def deleteNext(self): if len(self.deleteQueue) > 0: next = self.deleteQueue.pop(0) self.deleteFile(next) def deleteAll(self): for filename in self.deleteQueue: self.deleteFile(filename) self.deleteQueue = [] @Slot(str) def deleteFile(self, filename): """ Deletes the file `filename` to the server :param filename: Absolute or relative path to the file """ try: print 'Deleting %s' % filename self.ftp.delete(filename) return True except (error_reply, error_perm): print 'Error deleting %s' % filename return False self.fileEventCompleted.emit() @Slot(str) def onDownload(self, filename): self.downloadQueue.append(filename) def downloadNext(self): if len(self.downloadQueue) > 0: next = self.downloadQueue.pop(0) self.downloadFile(next) def downloadAll(self): for filename in self.downloadQueue: self.downloadFile(filename) self.downloadQueue = [] @Slot(str, str) def downloadFile(self, filename, localpath=None): """ Performs a binary download to the file `filename` located on the server. `filename` parameter can be either absolute or relative, though it can fail for relative paths if the current directory is not appropiate. :param filename: Relative or absolute path to the file :param localpath: Absolute local path where the file will be saved """ def handleChunk(chunk): """ Receives chuncks of data downloaded from the server. This function is meant to be used as callback for the `retrbinary` method. 
:params chunk: Chunk of downloaded bytes to be written into the file """ # Simply writes the received data into the file `self.downloading` self.downloading.write(chunk) self.download_progress += len(chunk) self.downloadProgress.emit(self.download_size, self.download_progress) if localpath is None: localpath = self.localFromServer(filename) localdir = os.path.dirname(localpath) if not os.path.exists(localdir): # Creates the directory if it doesn't already exists. os.makedirs(localdir) print 'Downloading: %s to %s' % (filename, localpath) try: with open(localpath, 'wb') as f: # Opens the file at `localname` which will hold the downloaded file. # Object attributes regarding download status are updated accordingly. self.fileEvent.emit(filename) self.downloading = f self.download_progress = 0 self.download_size = int( self.ftp.sendcmd('SIZE %s' % filename).split(' ')[-1]) self.ftp.retrbinary('RETR %s' % filename, handleChunk) print 'Download finished' # Let's set the same modified time to that on the server. with File.fromPath(filename) as downloadedfile: mdate = LocalWatcher.lastModified(localpath) downloadedfile.localmdate = mdate downloadedfile.servermdate = mdate self.setLastModified(filename, mdate) downloaded = True except (IOError, OSError): downloaded = False self.ioError.emit(localpath) except (error_reply, error_perm) as ftperr: print 'Error downloading %s, %s' % (filename, ftperr) downloaded = False # TODO: Sometimes the file doesn't complete properly. # in that case we maybe shouldn't call this? self.fileEventCompleted.emit() return downloaded @Slot(str) def onUpload(self, filename): self.uploadQueue.append(filename) def uploadNext(self): if len(self.uploadQueue) > 0: next = self.uploadQueue.pop(0) self.uploadFile(next) def uploadAll(self): for filename in self.uploadQueue: self.uploadFile(filename) self.uploadQueue = [] @Slot(str) def uploadFile(self, filename): """ Uploads the file `filename` to the server, creating the needed directories. 
:param filename: Absolute or relative path to the file """ def handle(buf): """This function is meant to be used as callback for the `storbinary` method.""" self.upload_progress += 1024 self.uploadProgress.emit(self.upload_size, self.upload_progress) # Creates the directory where the file will be uploaded to self.mkpath(os.path.dirname(filename)) localpath = self.localFromServer(filename) print 'Uploading %s to %s' % (localpath, filename) try: # Uploads file and updates its modified date in the server # to match the date in the local filesystem. self.upload_progress = 0 self.upload_size = os.path.getsize(localpath) self.fileEvent.emit(localpath) self.ftp.storbinary('STOR %s' % filename, open(localpath, 'rb'), 1024, handle) print 'Upload finished' with File.fromPath(filename) as uploaded: modified = uploaded.localmdate uploaded.servermdate = modified self.setLastModified(filename, modified) uploaded = True except (IOError, OSError): uploaded = False self.ioError.emit(localpath) except (error_reply, error_perm, OSError) as err: print 'Error uploading %s, %s' % (filename, err) uploaded = False # TODO: Sometimes the file doesn't complete properly. # in that case we maybe shouldn't call this? self.fileEventCompleted.emit() return uploaded def lastModified(self, filename): """ Uses the MDTM FTP command to find the last modified timestamp of the file `filename`. Returns a `datetime.datetime` object in UTC representing the file's last modified date and time. :param filename: Relative or absolute path to the file """ timestamp = self.ftp.sendcmd('MDTM %s' % filename) if '213 ' not in timestamp: # Second chance was found to be needed in some cases. timestamp = self.ftp.sendcmd('MDTM %s' % filename) timestamp = timestamp.split(' ')[-1] dateformat = '%Y%m%d%H%M%S.%f' if '.' 
in timestamp else '%Y%m%d%H%M%S' try: mtime = dt.strptime(timestamp, dateformat) except ValueError: mtime = dt.utcnow() return mtime def setLastModified(self, serverpath, newtime): """ Uses the MFMT or MDTM FTP commands to set `newtime` as the modified timestamp of the file `serverpath` on the server. :param serverpath: Relative or absolute path to the file :param newtime: datedatime object holding the required time """ cmds = ['MFMT', 'MDTM'] for cmd in cmds: try: self.ftp.sendcmd( '%s %s %s' % (cmd, newtime.strftime('%Y%m%d%H%M%S'), serverpath)) return except (error_perm, error_reply) as e: if cmd == cmds[len(cmds) - 1]: # If is the last comand, re-raise the exception, else # keep trying. raise e else: continue def mkpath(self, path): """ Creates the path `path` on the server by recursively created folders, if needed. :param path: Absolute path on the server to be created """ try: self.ftp.cwd(path) except error_perm: # `cwd` call failed. Need to create some folders make_dir = '/' steps = path.split('/') for step in steps: if len(step) == 0: continue make_dir += '%s/' % step try: self.ftp.mkd(make_dir) except error_perm: # Probably already exists continue else: # `cwd` call succeed. 
No need to create # any folders self.ftp.cwd('/') return @Slot(str, str) def added(self, location, serverpath): super(ServerWatcher, self).added(location, serverpath) def actionFromPath(serverpath): f = File() fileExistsOnServer = True try: f.servermdate = self.lastModified(serverpath) except error_perm: fileExistsOnServer = False f.servermdate = 0 f.localmdate = LocalWatcher.lastModified( self.localFromServer(serverpath)) diff = f.timeDiff() action = None if abs(diff) > Watcher.TOLERANCE: if not fileExistsOnServer or diff > 0: action = FileAction(serverpath, FileAction.UPLOAD, ServerWatcher.LOCATION) else: action = FileAction(serverpath, FileAction.DOWNLOAD, LocalWatcher.LOCATION) return action if self.preemptiveCheck: if location == ServerWatcher.LOCATION: localpath = self.localFromServer(serverpath) if not os.path.exists(localpath): action = FileAction(serverpath, FileAction.DOWNLOAD, ServerWatcher.LOCATION) self.preemptiveActions.append(action) else: action = actionFromPath(serverpath) if action is not None: self.preemptiveActions.append(action) elif location == LocalWatcher.LOCATION: try: self.ftp.sendcmd('SIZE %s' % serverpath) except (error_reply, error_perm): exists = False else: exists = True if not exists: action = FileAction(serverpath, FileAction.UPLOAD, LocalWatcher.LOCATION) self.preemptiveActions.append(action) else: action = actionFromPath(serverpath) if action is not None: self.preemptiveActions.append(action) @Slot(str, str) def changed(self, location, serverpath): super(ServerWatcher, self).changed(location, serverpath) @Slot(str, str) def deleted(self, location, serverpath): super(ServerWatcher, self).deleted(location, serverpath) with File.fromPath(serverpath) as deleted: deleted.inserver = False
class Cerberus(Engine):
    """Engine that scrapes price files from the Cerberus FTP service and
    extracts the XML payload of each downloaded ``.gz`` archive."""

    ftp_host = 'url.retail.publishedprices.co.il'
    ftp_path = '/'
    ftp_username = ''
    ftp_password = ''
    target_file_extension = '.xml'
    # Most recent FTP_TLS connection (False until get_files first runs).
    ftp = False

    def scrape(self):
        """List the remote files and persist them all concurrently."""
        super(Cerberus, self).scrape()
        loop = asyncio.get_event_loop()
        files = self.get_files()
        loop.run_until_complete(self.persist_files(files))

    def get_files(self):
        """Return the names of all files under ``ftp_path`` on the server."""
        self.ftp = FTP_TLS(self.ftp_host, self.ftp_username, self.ftp_password)
        self.ftp.cwd(self.ftp_path)
        files = self.ftp.nlst()
        self.ftp.quit()
        return files

    async def persist_files(self, file_names):
        """Download and extract every file in ``file_names`` on a thread pool."""
        loop = asyncio.get_event_loop()
        executor = concurrent.futures.ThreadPoolExecutor(max_workers=8)
        futures = [
            loop.run_in_executor(executor, self.persist_file, file_name)
            for file_name in file_names
        ]
        await asyncio.gather(*futures)

    def persist_file(self, file_name):
        """Download ``file_name`` (if it is a .gz archive), extract its XML
        content, and remove the temporary archive afterwards."""
        extension = os.path.splitext(file_name)[1]
        if extension != '.gz':
            # Non-archive entries are only logged, not downloaded.
            return Logger.file_parse(self.chain, file_name)
        temporary_gz_file_path = os.path.join(self.storage_path, file_name)
        self.fetch_temporary_gz_file(temporary_gz_file_path)
        Gzip.extract_xml_file_from_gz_file(temporary_gz_file_path)
        os.remove(temporary_gz_file_path)

    def fetch_temporary_gz_file(self, temporary_gz_file_path, max_retries=5):
        """Download the archive into ``temporary_gz_file_path``.

        Retries on failure up to ``max_retries`` times.  (The original
        retried via unbounded recursion inside a bare ``except:``, which
        leaked one open file handle per attempt and could recurse forever
        on a persistent failure.)
        """
        file_name = ntpath.basename(temporary_gz_file_path)
        for _attempt in range(max_retries):
            try:
                with open(temporary_gz_file_path, 'wb') as file_ftp:
                    ftp = FTP_TLS(self.ftp_host, self.ftp_username,
                                  self.ftp_password)
                    ftp.cwd(self.ftp_path)
                    ftp.retrbinary('RETR ' + file_name, file_ftp.write)
                    ftp.quit()
                return
            except Exception:
                # Log the failed attempt, then try again.
                Logger.file_parse(self.chain, file_name)
cursorObj.execute("SELECT mail FROM productores WHERE id='" + clave + "'") a = cursorObj.fetchall() return a con = sql_connection() #objeto conexion a servidor ftp = FTP_TLS() ftp.connect(host='144.217.157.48', port=65022, timeout=3000) ftp.login(user='******', passwd='Agosto.2019', acct='') #print(ftp.pwd()) #Lista los nombres de todos los archivos del servidor juan = ftp.nlst() print(juan) #los recorre for h in juan: #chequea si el codigo del archivo pertenece a algun productor de la base de datos n = (sql_existe(con, h[-23:-18])) r = n[0] z = r[0] esta = 0 #si esta lo manda a su carpeta correspondiente if (z == 1): cupa = (sql_dameNombre(con, h[-23:-18])) es = cupa[0] el = es[0] os.chdir( os.path.join('C:', '\\Users', 'Franco', 'Documents',
def load_date(self, date_str):
    """Load Wind/MFI magnetic-field data for *date_str* ("YYYY-MM-DD").

    Downloads the daily CDF from SPDF over FTPS if no local copy exists,
    loads it with pycdf, and appends the valid records to the instance
    arrays.  On any failure a 'fail' message is emitted and the method
    returns without raising.
    """
    # Abort if the requested date has already been loaded.
    if (self.n_date > 0):
        tk = where(self.date_str == date_str)[0]
        if (len(tk) > 0):
            return

    # Extract the year, month, and day of the requested date.
    year = int(date_str[0:4])
    mon = int(date_str[5:7])
    day = int(date_str[8:10])

    # Build the name pattern of the file containing the requested date
    # ('v??' matches any version number).
    str_year = date_str[0:4]
    str_mon = date_str[5:7]
    str_day = date_str[8:10]
    fl0 = 'wi_h0_mfi_' + str_year + str_mon + str_day + '_v??.cdf'
    fl0_path = os.path.join(self.path, fl0)
    gb = glob(fl0_path)

    # If no local copy exists, attempt to download the newest version.
    if (len(gb) > 0):
        fl_path = gb[-1]
    else:
        try:
            self.mesg_txt('ftp', date_str)
            ftp = FTP_TLS('spdf.gsfc.nasa.gov')
            ftp.login()
            ftp.cwd('pub/data/wind/mfi/')
            if (self.use_h2):
                ftp.cwd('mfi_h2/')
            else:
                ftp.cwd('mfi_h0/')
            ftp.cwd(str_year)
            ls = ftp.nlst(fl0)
            fl = ls[-1]  # last listing entry == highest version
            fl_path = os.path.join(self.path, fl)
            # Close the local file even if the transfer fails (the
            # original passed an unclosed handle's write method).
            with open(fl_path, 'wb') as f:
                ftp.retrbinary("RETR " + fl, f.write)
            ftp.quit()  # original never closed the connection
        # Exception (not a bare except) so Ctrl-C still propagates.
        except Exception:
            self.mesg_txt('fail', date_str)
            return

    # If the file now exists, try to load it; otherwise, abort.
    self.mesg_txt('load', date_str)
    if (os.path.isfile(fl_path)):
        try:
            cdf = pycdf.CDF(fl_path)
        except Exception:
            self.mesg_txt('fail', date_str)
            return
    else:
        self.mesg_txt('fail', date_str)
        return

    # Extract the data and select records with valid (vs. fill) values.
    if (self.use_h2):
        sub_t = cdf['Epoch'][:, 0]
        sub_b_x = cdf['BGSE'][:, 0]
        sub_b_y = cdf['BGSE'][:, 1]
        sub_b_z = cdf['BGSE'][:, 2]
        sub_ind = tile(self.t_date, len(sub_t))
        # TODO: Establish quality checks.
        n_tk = len(sub_t)
        tk = arange(n_tk)
    else:
        sub_t = cdf['Epoch3'][:, 0]
        sub_b_x = cdf['B3GSE'][:, 0]
        sub_b_y = cdf['B3GSE'][:, 1]
        sub_b_z = cdf['B3GSE'][:, 2]
        sub_pnt = cdf['NUM3_PTS'][:, 0]
        sub_ind = tile(self.t_date, len(sub_t))
        # Valid records are those averaged from at least one point.
        tk = where(sub_pnt > 0)[0]
        n_tk = len(tk)

    # Record the date associated with this file.
    new_date_str = [date_str]
    new_date_ind = [self.t_date]
    n_new_date = 1

    # Append any valid, newly-loaded data to the saved arrays.
    if (n_tk > 0):
        self.mfi_t = append(self.mfi_t, sub_t[tk])
        self.mfi_b_x = append(self.mfi_b_x, sub_b_x[tk])
        self.mfi_b_y = append(self.mfi_b_y, sub_b_y[tk])
        self.mfi_b_z = append(self.mfi_b_z, sub_b_z[tk])
        self.mfi_ind = append(self.mfi_ind, sub_ind[tk])

    # Append the array of loaded dates with the date(s) loaded in this
    # call of this function.
    self.date_str = append(self.date_str, new_date_str)
    self.date_ind = append(self.date_ind, new_date_ind)
    self.n_date += n_new_date
    self.t_date += n_new_date

    # Request a clean-up of the files in the data directory.
    self.cleanup_file()
os.makedirs(mydatabase)


def wgetdown(link):
    # Download one file into the staging directory (quiet wget).
    os.system('wget --quiet --directory-prefix=' + mydowndir + ' ' + link)


# NOTE(review): `downburstlist`, `mydatabase`, `mydowndir`, `filelistdir`,
# `fermidatabase`, `ftplink`, `nthread` and `f` (an FTP_TLS client) are
# all defined above the visible region.  The shell commands below expand
# variables into the command string unquoted.
for downburst in downburstlist:
    # Clear the staging areas left over from the previous burst.
    os.system('rm -rf ' + filelistdir + '/*')
    os.system('rm -rf ' + mydowndir + '/*')
    # Burst names encode the two-digit year at characters 2:4.
    year = '20' + downburst[2:4]
    if os.path.exists(mydatabase + year) == False:
        os.makedirs(mydatabase + year)
    # Re-authenticate and switch the data channel to TLS for each burst.
    f.login()
    f.prot_p()
    f.cwd(fermidatabase + year + '/' + downburst + '/current/')
    files = f.nlst()
    # Write the download URLs for this burst to list.txt ...
    with open(filelistdir + 'list.txt', 'w') as newlistfile:
        for filename in files:
            newlistfile.write(ftplink + year + '/' + downburst +
                              '/current/' + filename + '\n')
    # ... then read it back as one link per line.
    with open(filelistdir + 'list.txt') as downlistfile:
        filelinks = downlistfile.readlines()
    # NOTE(review): an unusual __main__ guard *inside* the loop -- it
    # protects the multiprocessing Pool from re-execution on import.
    if __name__ == '__main__':
        print('Downloading burst:', downburst, '...')
        # Fetch the burst's files in parallel with `nthread` workers.
        p = Pool(nthread)
        p.map(wgetdown, filelinks)
        # Move the downloaded files into the per-burst database directory.
        os.system('mkdir ' + mydatabase + year + '/' + downburst)
        os.system('mv ' + mydowndir + '/* ' + mydatabase + year + '/' + downburst)
# Get list of groups user is a member of r = requests.get('%s/groups?token=%s' % (GROUPME_API_URL, TOKEN)) resp = r.json() print 'Groups:' for item in resp['response']: print item['name'] # Securely connect to FTP server ftps = FTP_TLS(HOST, USER, PASSWD) ftps.prot_p() # Change working directory to directory containing images ftps.cwd(IMAGE_DIRECTORY) # Get list of items in current directory directory_list = ftps.nlst() # Get list of images image_list = [item for item in directory_list if '.jpg' in item] # Save oldest & newest images images_to_upload = [] if image_list: # Add first image images_to_upload.append(image_list[0]) if len(image_list) > 1: # Add last image (if more than 1 image) images_to_upload.append(image_list[len(image_list) - 1]) # Download oldest & newest image for image in images_to_upload: print 'Downloading %s...' % image ftps.retrbinary('RETR %s' % image, open(image, 'wb').write)
def group_listing(): # Get list of groups user is a member of r = requests.get('%s/groups?token=%s' % (GROUPME_API_URL, TOKEN)) resp = r.json() print 'Groups:' for item in resp['response']: print item['name'] # Securely connect to FTP server ftps = FTP_TLS(HOST, USER, PASSWD) ftps.prot_p() # Change working directory to directory containing images ftps.cwd(IMAGE_DIRECTORY) # Get list of items in current directory directory_list = ftps.nlst() # Get list of images image_list = [item for item in directory_list if '.jpg' in item] # Save oldest & newest images images_to_upload = [] if image_list: # Add first image images_to_upload.append(image_list[0]) if len(image_list) > 1: # Add last image (if more than 1 image) images_to_upload.append(image_list[len(image_list)-1]) # Download oldest & newest image for image in images_to_upload: print 'Downloading %s...' % image ftps.retrbinary('RETR %s' % image, open(image, 'wb').write)
# Mirror today's remote CSV directory to the local save path; on any
# failure fall back to reporting over MQTT.
timeCorrection = datetime.timedelta(hours=hoursCorrection)
foldername = str(datetime.date.today())
PATH_TO_SAVE_CVSDIRECTORIES = '/path/files/'
directoryToDelete = '/path/files/'
daysToKeepCSV = 1
client1 = mqtt.Client()

deleteOldFiles()  # prune directories older than daysToKeepCSV (defined elsewhere)

try:
    ftps = FTP_TLS('host')
    ftps.login('user', 'password')
    ftps.prot_p()  # switch to secure data connection
    ftps.cwd('dir')
    ftps.cwd(str(datetime.date.today()))
    files = ftps.nlst()  # Get All Files
    local_dir = PATH_TO_SAVE_CVSDIRECTORIES + str(datetime.date.today())
    for file in files:
        #print("Downloading..." + file)
        if not os.path.isdir(local_dir):
            os.mkdir(local_dir)
        # Close the local file even if the transfer fails (the original
        # leaked the handle returned by open()).
        with open(local_dir + "/" + file, 'wb') as local_file:
            ftps.retrbinary("RETR " + file, local_file.write)
    ftps.close()
    #print("END OK")
# BUGFIX: catch Exception instead of a bare except so Ctrl-C/SystemExit
# still terminate the script instead of triggering the MQTT fallback.
except Exception:
    client1.connect(broker, port)
def run(self):
    """Sync this batch of player stats with stats.json on the FTP server
    and regenerate the public HTML scoreboard.

    Flow: download stats.json (if present) -> merge this session's
    kill/death/score/play counters into it -> upload stats.json back ->
    write and upload a simple HTML ranking page.
    """
    # pull our existing stats from online disk
    try:
        import ftplib
        from ftplib import FTP_TLS
        ftp = FTP_TLS('address')
        ftp.login('user', 'password')
        ftp.prot_p()
        ftp.cwd('/files/')
        dir = ftp.nlst()
        if 'stats.json' in dir:
            File = open("stats.json", "wb")
            ftp.retrbinary('RETR stats.json', File.write)
            File.close()
            File = open("stats.json", "r")
            stats = json.loads(File.read())
            File.close()
        else:
            # First run: start with an empty stats dict.
            stats = {}
            print 'stats file not exist on ftp'
    except Exception:
        print 'error while connecting ftp'
        bs.screenMessage('CONNECTION ERROR', color=(1, 0, 0))
    else:
        # now add this batch of kills to our persistant stats
        for account_id, kill_count in self._account_kills.items():
            # add a new entry for any accounts that dont have one
            if account_id not in stats:
                # also lets ask the master-server for their account-display-str.
                # (we only do this when first creating the entry to save time,
                # though it may be smart to refresh it periodically since
                # it may change)
                url = 'http://bombsquadgame.com/accountquery?id=' + account_id
                response = json.loads(
                    urllib2.urlopen(urllib2.Request(url)).read())
                name_html = response['name_html']
                stats[account_id] = {
                    'kills': 0,
                    'killed': 0,
                    'scores': 0,
                    'played': 0,
                    'name_html': name_html
                }
            # now increment their kills whether they were already there or not
            stats[account_id]['kills'] += kill_count
        # NOTE(review): new entries are only created for accounts present
        # in _account_kills; an account appearing only in _account_killed /
        # _account_scores / _account_played and not already in stats.json
        # would raise KeyError below -- confirm upstream guarantees.
        for account_id, killed_count in self._account_killed.items():
            stats[account_id]['killed'] += killed_count
        for account_id, scores_count in self._account_scores.items():
            stats[account_id]['scores'] += scores_count
        for account_id, played_count in self._account_played.items():
            stats[account_id]['played'] += played_count
        for account_id, name in self._account_name.items():
            stats[account_id]['name_full'] = name

        # dump our stats back to disk, then re-open for the FTP upload
        File = open("stats.json", "w")
        File.write(json.dumps(stats, indent=4))
        File.close()
        File = open("stats.json", "r")
        ftp.storbinary('STOR stats.json', File)
        File.close()

        # lastly, write a pretty html version.
        # our stats url could point at something like this...
        entries = [(a['kills'], a['killed'], a['scores'], a['played'],
                    a['name_html']) for a in stats.values()]
        # this gives us a list of kills/names sorted high-to-low
        entries.sort(reverse=True)
        htmlFile = open("statspage.html", "wb")
        htmlFile.write('<head><meta charset="UTF-8"></head><body>')
        for entry in entries:
            kills = str(entry[0])
            killed = str(entry[1])
            scores = str(entry[2])
            played = str(entry[3])
            name = entry[4].encode('utf-8')
            htmlFile.write(kills + ' kills ' + killed + ' deaths ' + scores +
                           ' score ' + played + ' games : ' + name + '<br>')
        htmlFile.write('</body>')
        htmlFile.close()
        htmlFile = open("statspage.html", "r")
        ftp.storbinary('STOR statspage.html', htmlFile)
        htmlFile.close()
        ftp.quit()
        # aaand that's it! There IS no step 27!
        print 'Added', len(self._account_played), 'Log entries.'
def run(self):
    """Resize every .jpg/.png in master.dirpath, upload the resized
    copies to the FTP picture directory, then generate and upload the
    post's HTML page.  Progress is reported via master.status."""
    self.master.status = "establishing connection to server..."
    try:
        ftps = FTP_TLS(self.master.ftp_url, self.master.ftp_user,
                       self.master.ftp_pw)
        ftps.cwd(self.master.ftp_dir)
        # Skip the '.' and '..' entries, then find the highest existing
        # picture number (remote files are named '<number>.jpg').
        picnames = np.array(ftps.nlst())[2:]
        picnumbers = map(int, [name[0:-4] for name in picnames])
        maxnumber = max(picnumbers)
        self.master.status = "connection successful"
    except Exception:
        traceback.print_exc()
        self.master.status = "could not establish connection"
        self.master.notuploading = True
        # BUGFIX: the original fell through here and then crashed with
        # NameError on `ftps`/`maxnumber`.
        return

    html_pics = ''
    pic_1 = '''<div class="responsive"> <div class="gallery"> <img src="/pictures/'''
    pic_2 = '''.jpg" width="600" height="400"> <div class="desc"></div> </div> </div>'''
    picnumber = maxnumber + 1

    smalldir = self.master.dirpath + '/smallpics'
    if not os.path.exists(smalldir):
        os.makedirs(smalldir)

    for filename in os.listdir(self.master.dirpath):
        if filename[-4:] != ".jpg" and filename[-4:] != ".png":
            continue
        picpath = self.master.dirpath + '/' + filename
        pic = ndimage.imread(picpath)
        # Scale the longest edge down to 1328 px.
        fac = 1328. / max(pic.shape)
        smallpic = misc.imresize(pic, fac)
        newpath = smalldir + '/' + str(picnumber) + '.jpg'
        misc.imsave(newpath, smallpic)
        html_pics = html_pics + pic_1 + str(picnumber) + pic_2
        # upload pic -- BUGFIX: open in binary mode ('rb', not 'r') so
        # Windows newline translation cannot corrupt the JPEG, and close
        # the handle via a with-block.
        self.master.status = "uploading picture " + newpath
        with open(newpath, 'rb') as picfile:
            ftps.storbinary("STOR " + str(picnumber) + '.jpg', picfile)
        picnumber = picnumber + 1

    # Assemble the page from the master's HTML fragments plus the
    # gallery markup built above.
    html_intro = (self.master.html_intro_1 + self.master.category +
                  self.master.html_intro_2)
    full_html = (html_intro + self.master.html_text + html_pics +
                 self.master.html_end)
    html_name = self.master.title + ".php"
    html_path = (self.master.codedir + '/' + self.master.date + "_" +
                 html_name)
    with open(html_path, "w") as htmlfile:
        htmlfile.write(full_html)

    # upload the page one directory above the pictures
    try:
        self.master.status = "uploading html " + html_path
        with open(html_path, 'rb') as htmlfile:
            storcommand = "STOR " + self.master.date + '_' + html_name
            ftps.cwd('..')
            ftps.storbinary(storcommand, htmlfile)
        ftps.quit()
        self.master.status = "uploading succesful"
        self.master.notuploading = True
    except Exception:
        traceback.print_exc()
        self.master.notuploading = True
class FTPClient(object):
    """Class FTPClient

    Thin wrapper around ftplib.FTP/FTP_TLS that reports progress and
    errors through the MasterHead message/event framework instead of
    raising: every operation fires before/after events (listeners may
    rewrite arguments or suppress the default action) and returns a
    bool/​value rather than propagating ftplib errors.
    """

    _mh = None            # MasterHead singleton
    _client = None        # underlying ftplib client (FTP or FTP_TLS)
    _secured = None       # True -> FTP over TLS
    _host = None
    _port = None
    _user = None
    _passw = None
    _path = None          # current remote working directory
    _verbose = None
    _is_connected = None

    def __init__(self, secured=False, verbose=False):
        """Class constructor

        Called when the object is initialized

        Args:
           secured (bool): secured FTP
           verbose (bool): verbose mode
        """

        self._mh = MasterHead.get_head()

        self._secured = secured
        if (not self._secured):
            self._client = FTP()
        else:
            # FTP_TLS requires the ssl module features absent in 2.6.
            if (not(version_info[0] == 2 and version_info[1] == 6)):
                self._client = FTP_TLS()
            else:
                raise NotImplementedError(
                    'Secured mode is not supported for Python 2.6')

        self._verbose = verbose
        if (self._verbose):
            self._client.set_debuglevel(2)

    @property
    def client(self):
        """ FTP client property getter """
        return self._client

    @property
    def secured(self):
        """ secured protocol mode property getter """
        return self._secured

    @property
    def host(self):
        """ server host property getter """
        return self._host

    @property
    def port(self):
        """ server port property getter """
        return self._port

    @property
    def user(self):
        """ username property getter """
        return self._user

    @property
    def passw(self):
        """ user password property getter """
        return self._passw

    @property
    def path(self):
        """ remote path property getter """
        return self._path

    @property
    def verbose(self):
        """ verbose mode property getter """
        return self._verbose

    @property
    def is_connected(self):
        """ is_connected property getter """
        return self._is_connected

    def connect(self, host, port=21, user=None, passw=None, path='/', timeout=10):
        """Method connects to server

        Args:
           host (str): server host
           port (int): server port, default protocol port
           user (str): username
           passw (str): password
           path (str): server path
           timeout (int): timeout

        Returns:
           bool: result

        Raises:
           event: ftp_before_connect
           event: ftp_after_connect
        """

        try:
            # NOTE(review): this debug message includes the plain-text
            # password.
            message = '{0}/{1}@{2}:{3}{4} timeout:{5}'.format(
                user, passw, host, port, path, timeout)
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_connecting', message), self._mh.fromhere())

            # Event listeners may rewrite the connection parameters.
            ev = event.Event(
                'ftp_before_connect', host, port, user, passw, path, timeout)
            if (self._mh.fire_event(ev) > 0):
                host = ev.argv(0)
                port = ev.argv(1)
                user = ev.argv(2)
                passw = ev.argv(3)
                path = ev.argv(4)
                timeout = ev.argv(5)

            self._host = host
            self._port = port
            self._user = user
            self._passw = passw

            if (ev.will_run_default()):
                self._client.connect(self._host, self._port, timeout=timeout)
                if (self._user != None):
                    self._client.login(self._user, self._passw)
                if (self._secured):
                    # encrypt the data channel too
                    self._client.prot_p()
                self._is_connected = True

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_connected'), self._mh.fromhere())
            if (path != None):
                self.change_dir(path)

            ev = event.Event('ftp_after_connect')
            self._mh.fire_event(ev)

            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def disconnect(self):
        """Method disconnects from server

        Args:
           none

        Returns:
           bool: result
        """

        try:
            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False
            else:
                self._client.quit()
                self._is_connected = False
                self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                    'htk_ftp_disconnected'), self._mh.fromhere())
                return True
        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def list_dir(self):
        """Method lists remote working directory

        Args:
           none

        Returns:
           list: names
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_list_dir', self._path), self._mh.fromhere())

            # NOTE(review): returns False here but None on error below --
            # callers must handle both falsy non-list results.
            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            names = self._client.nlst()
            # Drop the '.'/'..' entries some servers include.
            if ('.' in names):
                names.remove('.')
            if ('..' in names):
                names.remove('..')
            return names

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return None

    def change_dir(self, path):
        """Method changes remote working directory

        Args:
           path (str): new remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_change_dir
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_change_dir', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_change_dir', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.cwd(path)
                # cache the server-reported working directory
                self._path = self._client.pwd()

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_cur_dir', self._path), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def download_file(self, remote_path, local_path=None):
        """Method downloads file from server

        Args:
           remote_path (str): remote path
           local_path (str): local path, default ./filename

        Returns:
           bool: result

        Raises:
           event: ftp_before_download_file
           event: ftp_after_download_file
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_downloading_file', remote_path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event(
                'ftp_before_download_file', remote_path, local_path)
            if (self._mh.fire_event(ev) > 0):
                remote_path = ev.argv(0)
                local_path = ev.argv(1)

            if (local_path != None and not path.exists(local_path)):
                self._mh.demsg('htk_on_error', self._mh._trn.msg(
                    'htk_ftp_unknown_dir', local_path), self._mh.fromhere())
                return False

            filename = remote_path.split('/')[-1]
            lpath = filename if (local_path == None) else path.join(
                local_path, filename)

            if (ev.will_run_default()):
                with open(lpath, 'wb') as f:
                    self._client.retrbinary('RETR ' + remote_path, f.write)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_file_downloaded'), self._mh.fromhere())
            ev = event.Event('ftp_after_download_file')
            self._mh.fire_event(ev)

            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            # remove the partially-written local file on failure
            if (path.exists(lpath)):
                remove(lpath)
            return False

    def upload_file(self, local_path, remote_path=None):
        """Method uploads file to server

        Args:
           local_path (str): local path
           remote_path (str): remote path, default ./filename

        Returns:
           bool: result

        Raises:
           event: ftp_before_upload_file
           event: ftp_after_upload_file
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_uploading_file', local_path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_upload_file', local_path, remote_path)
            if (self._mh.fire_event(ev) > 0):
                local_path = ev.argv(0)
                remote_path = ev.argv(1)

            if (not(path.exists(local_path) or path.exists(path.relpath(local_path)))):
                self._mh.demsg('htk_on_error', self._mh._trn.msg(
                    'htk_ftp_unknown_file', local_path), self._mh.fromhere())
                return False

            filename = local_path.split('/')[-1]
            rpath = filename if (remote_path == None) else path.join(
                remote_path, filename)

            if (ev.will_run_default()):
                with open(local_path, 'rb') as f:
                    self._client.storbinary('STOR ' + rpath, f)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_file_uploaded'), self._mh.fromhere())
            ev = event.Event('ftp_after_upload_file')
            self._mh.fire_event(ev)

            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def delete_file(self, path):
        """Method deletes file from server

        Args:
           path (str): remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_delete_file
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_deleting_file', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_delete_file', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.delete(path)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_file_deleted'), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def make_dir(self, path):
        """Method makes directory on server

        Args:
           path (str): remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_make_dir
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_making_dir', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_make_dir', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.mkd(path)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_dir_made'), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def remove_dir(self, path):
        """Method removes directory from server

        Args:
           path (str): remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_remove_dir
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_removing_dir', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_remove_dir', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.rmd(path)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_dir_removed'), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False
#ftps.login(user,secret) ftps.sendcmd('USER ' + user) ftps.sendcmd('PASS ' + secret) print(ftps.getwelcome()) print('CURRENT WORKING DIRECTORY IS:',ftps.pwd()) #Enable data encryption # TODO solve the encryption problem #ftps.prot_p() #define default DIR d = 'feeds' #Change to default DIR ftps.cwd(d) #Build list of files on servers l = ftps.nlst() l.sort() for i in l: print(i) #Assign last element to var litem = len(l)-1 print("MOST RECENT FILE ON SERVER IS; ",l[litem]) g = l[litem] #Define local file t = d + '/' + g if os.path.exists(t): print("FILE" ,g," EXISTS,WILL NOT DOWNLOAD FROM HOST:",host) else: print("WILL DOWNLOAD FILE:",g) #Construct 'RETR' string for FTP download function
def getData(MET, putdir=None, basedir='', getTTEflag=False, getPOSflag=False,
            getCSPECflag=False, getCTIMEflag=False, getAllflag=False):
    """Download Fermi/GBM daily data products for mission elapsed time *MET*
    from the NASA legacy FTP archive.

    Args:
        MET: mission elapsed time, converted to UTC via fermi2utc().
        putdir: destination directory; defaults to
            ``basedir + '/20<yy>/<mm>/<dd>/current/'`` and is created if
            missing.
        basedir: base path used when *putdir* is not given.
        getTTEflag / getPOSflag / getCSPECflag / getCTIMEflag: select which
            product families to fetch (TTE, position history, CSPEC, CTIME).
        getAllflag: convenience switch that enables all four flags.

    Files already present locally are never re-downloaded.
    """
    if getAllflag:
        getTTEflag = True
        getPOSflag = True
        getCSPECflag = True
        getCTIMEflag = True

    utc = fermi2utc(MET)
    yymmddpath = (utc.strftime('%y') + '/' + utc.strftime('%m') + '/' +
                  utc.strftime('%d') + '/')
    if putdir is None:
        putdir = basedir + '/20' + yymmddpath + 'current/'
    if not os.path.exists(putdir):
        os.makedirs(putdir)

    ftps = FTP_TLS('legacy.gsfc.nasa.gov')
    ftps.login()  # anonymous login
    ftpdir = 'fermi/data/gbm/daily/20' + yymmddpath + 'current/'
    ftps.cwd(ftpdir)
    foldfiles = ftps.nlst()

    def _fetch(remote_name):
        # Download one remote file into putdir unless it already exists,
        # closing the local handle even if the transfer fails (the
        # original leaked open() handles on error).
        destination = putdir + remote_name
        if not os.path.isfile(destination):
            with open(destination, 'wb') as out:
                ftps.retrbinary("RETR " + remote_name, out.write)

    try:
        if getPOSflag:
            # Raises IndexError when no poshist file exists -- handled
            # below, matching the original behavior.
            _fetch([f for f in foldfiles if 'poshist' in f][0])

        if getCTIMEflag:
            for fn in [f for f in foldfiles if 'ctime' in f]:
                _fetch(fn)

        if getCSPECflag:
            for fn in [f for f in foldfiles if 'cspec' in f]:
                _fetch(fn)

        if getTTEflag:
            # TTE files are split through the day; pick the chunk whose
            # start time (name characters 17:20, in thousandths of a day)
            # is the latest one not after the requested time.
            dayfrac = int(((utc.hour * 60.0 + utc.minute) * 60.0 +
                           utc.second) / 86400. * 1000.)
            flist = [f for f in foldfiles if 'tte' in f]
            tlist = []
            for thing in flist:
                pt = float(thing[17:20])
                if pt not in tlist:
                    tlist.append(pt)
            comp = 1000
            for t in tlist:
                if dayfrac > t and dayfrac - t < comp:
                    comp = dayfrac - t
            goodfrac = str(int(dayfrac - comp))
            goodstr = utc.strftime('%y%m%d') + str(goodfrac).zfill(3)
            for fn in [x for x in flist if goodstr in x]:
                _fetch(fn)
    except IndexError:
        print('Index Error in getGBMdata')
    finally:
        # BUGFIX: close the control connection exactly once on every path
        # (the original called quit() separately on the success and error
        # paths and leaked the connection if an unexpected error escaped).
        ftps.quit()
def upload(self, bangumi_tag='', debug_file=''):
    """Upload the downloaded video to the configured FTP server with
    resume support.

    The file is first transferred into a per-video temporary directory
    (to isolate Pure-Ftpd's unpredictable resume-cache file names), its
    remote size is verified against the local size, and only then is it
    renamed into its final place.  Returns self.upload_succeed_flag.
    """
    first_connect = True  # marks the first connection; it purges the stale temp dir
    tmp_dir = str(self._sn) + '-uploading-by-aniGamerPlus'
    if debug_file:
        self.local_video_path = debug_file
    if not os.path.exists(self.local_video_path):
        # If the local file does not exist, fail immediately.
        return self.upload_succeed_flag
    if not self._video_filename:
        # Upload-only mode: derive the file name from the path.
        self._video_filename = os.path.split(self.local_video_path)[-1]
    socket.setdefaulttimeout(20)  # 20 s socket timeout
    if self._settings['ftp']['tls']:
        ftp = FTP_TLS()  # FTP over TLS
    else:
        ftp = FTP()

    def connect_ftp(show_err=True):
        # Connect + log in (up to 4 attempts), then navigate/create the
        # category, series, and temp directories.  Returns True on success.
        ftp.encoding = 'utf-8'  # avoid mojibake in non-ASCII file names
        err_counter = 0
        connect_flag = False
        while err_counter <= 3:
            try:
                ftp.connect(self._settings['ftp']['server'],
                            self._settings['ftp']['port'])  # connect to FTP
                ftp.login(self._settings['ftp']['user'],
                          self._settings['ftp']['pwd'])  # log in
                connect_flag = True
                break
            except ftplib.error_temp as e:
                # Temporary server errors: wait 5 minutes, retry up to 3x.
                if show_err:
                    if 'Too many connections' in str(e):
                        detail = self._video_filename + ' 当前FTP連接數過多, 5分鐘后重試, 最多重試三次: ' + str(
                            e)
                        err_print(self._sn, 'FTP狀態', detail, status=1)
                    else:
                        detail = self._video_filename + ' 連接FTP時發生錯誤, 5分鐘后重試, 最多重試三次: ' + str(
                            e)
                        err_print(self._sn, 'FTP狀態', detail, status=1)
                err_counter = err_counter + 1
                for i in range(5 * 60):
                    time.sleep(1)
            except BaseException as e:
                # Anything else is unrecoverable for this attempt loop.
                if show_err:
                    detail = self._video_filename + ' 在連接FTP時發生無法處理的異常:' + str(
                        e)
                    err_print(self._sn, 'FTP狀態', detail, status=1)
                break

        if not connect_flag:
            err_print(self._sn, '上傳失败', self._video_filename, status=1)
            return connect_flag  # connection failed -- give up

        ftp.voidcmd('TYPE I')  # binary mode

        if self._settings['ftp']['cwd']:
            try:
                ftp.cwd(self._settings['ftp']['cwd'])  # user-configured base dir
            except ftplib.error_perm as e:
                if show_err:
                    err_print(self._sn, 'FTP狀態',
                              '進入指定FTP目錄時出錯: ' + str(e), status=1)

        if bangumi_tag:  # optional category directory
            try:
                ftp.cwd(bangumi_tag)
            except ftplib.error_perm:
                try:
                    ftp.mkd(bangumi_tag)
                    ftp.cwd(bangumi_tag)
                except ftplib.error_perm as e:
                    if show_err:
                        err_print(self._sn, 'FTP狀態',
                                  '創建目錄番劇目錄時發生異常, 你可能沒有權限創建目錄: ' + str(e),
                                  status=1)

        # Per-series directory
        ftp_bangumi_dir = Config.legalize_filename(
            self._bangumi_name)  # sanitize for the filesystem
        try:
            ftp.cwd(ftp_bangumi_dir)
        except ftplib.error_perm:
            try:
                ftp.mkd(ftp_bangumi_dir)
                ftp.cwd(ftp_bangumi_dir)
            except ftplib.error_perm as e:
                if show_err:
                    detail = '你可能沒有權限創建目錄(用於分類番劇), 視頻文件將會直接上傳, 收到異常: ' + str(
                        e)
                    err_print(self._sn, 'FTP狀態', detail, status=1)

        # Purge the stale temporary directory on the very first connection.
        nonlocal first_connect
        if first_connect:
            remove_dir(tmp_dir)
            first_connect = False  # mark first connection done

        # (Re)create the temporary directory.  It exists because Pure-Ftpd
        # renames an in-progress upload to an unpredictable cache name; a
        # clean disconnect renames it back, but an unexpected drop does
        # not, so we must be able to find that cache file later without
        # confusing it with other videos' caches.
        try:
            ftp.cwd(tmp_dir)
        except ftplib.error_perm:
            ftp.mkd(tmp_dir)
            ftp.cwd(tmp_dir)

        return connect_flag

    def exit_ftp(show_err=True):
        # Quit politely; force-close if quit() itself fails.
        try:
            ftp.quit()
        except BaseException as e:
            if show_err and self._settings['ftp']['show_error_detail']:
                err_print(self._sn, 'FTP狀態',
                          '將强制關閉FTP連接, 因爲在退出時收到異常: ' + str(e))
        ftp.close()

    def remove_dir(dir_name):
        # Remove a remote directory, emptying it first if necessary.
        try:
            ftp.rmd(dir_name)
        except ftplib.error_perm as e:
            if 'Directory not empty' in str(e):
                # Non-empty: delete the contained files, then the dir.
                ftp.cwd(dir_name)
                del_all_files()
                ftp.cwd('..')
                ftp.rmd(dir_name)
            elif 'No such file or directory' in str(e):
                pass
            else:
                # Any other permission error is unexpected.
                raise e

    def del_all_files():
        # Delete every regular entry in the current remote directory.
        try:
            for file_need_del in ftp.nlst():
                if not re.match(r'^(\.|\.\.)$', file_need_del):
                    ftp.delete(file_need_del)
                    # print('deleted file: ' + file_need_del)
        except ftplib.error_perm as resp:
            if not str(resp) == "550 No files found":
                raise

    if not connect_ftp():  # connect to FTP
        return self.upload_succeed_flag  # connection failed

    err_print(self._sn, '正在上傳',
              self._video_filename + ' title=' + self._title + '……')
    try_counter = 0
    # video_filename may later hold a Pure-Ftpd cache name instead.
    video_filename = self._video_filename
    max_try_num = self._settings['ftp']['max_retry_num']
    local_size = os.path.getsize(self.local_video_path)  # local file size
    while try_counter <= max_try_num:
        try:
            if try_counter > 0:  # recovering from an interrupted transfer
                detail = self._video_filename + ' 发生异常, 重連FTP, 續傳文件, 將重試最多' + str(
                    max_try_num) + '次……'
                err_print(self._sn, '上傳狀態', detail, status=1)
                if not connect_ftp():  # reconnect
                    return self.upload_succeed_flag
            # Work around Pure-Ftpd renaming a resumed upload: a normal
            # close restores the name, but a dropped connection leaves the
            # temporary cache name in place -- adopt it if found.
            try:
                for i in ftp.nlst():
                    if 'pureftpd-upload' in i:
                        # Found a Pure-Ftpd cache -- resume into it.
                        video_filename = i
            except ftplib.error_perm as resp:
                if not str(resp
                           ) == "550 No files found":  # re-raise anything but "empty dir"
                    raise
            # Resume point (requires server-side REST/size support).
            try:
                ftp_binary_size = ftp.size(video_filename)  # remote byte count
            except ftplib.error_perm:  # file does not exist yet
                ftp_binary_size = 0
            except OSError:
                try_counter = try_counter + 1
                continue
            ftp.voidcmd('TYPE I')  # binary mode
            conn = ftp.transfercmd('STOR ' + video_filename,
                                   ftp_binary_size)  # remote name + resume offset
            with open(self.local_video_path, 'rb') as f:
                f.seek(ftp_binary_size)  # start reading at the resume point
                while True:
                    block = f.read(1048576)  # 1 MiB chunks
                    conn.sendall(block)  # send the chunk
                    if not block:
                        time.sleep(3)  # give sendall() time to drain
                        break
            conn.close()
            err_print(self._sn, '上傳狀態', '檢查遠端文件大小是否與本地一致……')
            exit_ftp(False)
            # Reconnect: without it ftp.size() below returns None, and an
            # unfinished sendall() would answer "500 Unknown command".
            connect_ftp(False)
            err_counter = 0
            remote_size = 0
            while err_counter < 3:
                try:
                    remote_size = ftp.size(video_filename)  # remote file size
                    break
                except ftplib.error_perm as e1:
                    err_print(self._sn, 'FTP狀態',
                              'ftplib.error_perm: ' + str(e1))
                    remote_size = 0
                    break
                except OSError as e2:
                    err_print(self._sn, 'FTP狀態', 'OSError: ' + str(e2))
                    remote_size = 0
                    connect_ftp(False)  # dropped -- reconnect
                    err_counter = err_counter + 1
            if remote_size is None:
                err_print(self._sn, 'FTP狀態', 'remote_size is None')
                remote_size = 0
            # Size lookup failed (file missing or server glitch): the next
            # attempt would see offset 0 and restart, so clear the temp
            # directory to avoid resuming into the wrong cache file.
            if remote_size == 0:
                del_all_files()
            if remote_size != local_size:  # size mismatch -> retry/resume
                # print('remote_size='+str(remote_size))
                # print('local_size ='+str(local_size))
                detail = self._video_filename + ' 在遠端為' + str(
                    round(remote_size / float(1024 * 1024), 2)) + 'MB' + ' 與本地' + str(
                    round(local_size / float(1024 * 1024), 2)
                ) + 'MB 不一致! 將重試最多' + str(max_try_num) + '次'
                err_print(self._sn, '上傳狀態', detail, status=1)
                try_counter = try_counter + 1
                continue  # resume
            # Upload complete and verified.
            ftp.cwd('..')  # leave the temporary directory
            try:
                # Replace any pre-existing file of the same name.
                ftp.size(self._video_filename)
                ftp.delete(self._video_filename)
            except ftplib.error_perm:
                pass
            # Move the video out of the temp dir and restore its real name.
            ftp.rename(tmp_dir + '/' + video_filename, self._video_filename)
            remove_dir(tmp_dir)  # drop the temporary directory
            self.upload_succeed_flag = True  # mark success
            break
        except ConnectionResetError as e:
            if self._settings['ftp']['show_error_detail']:
                detail = self._video_filename + ' 在上傳過程中網絡被重置, 將重試最多' + str(
                    max_try_num) + '次' + ', 收到異常: ' + str(e)
                err_print(self._sn, '上傳狀態', detail, status=1)
            try_counter = try_counter + 1
        except TimeoutError as e:
            if self._settings['ftp']['show_error_detail']:
                detail = self._video_filename + ' 在上傳過程中超時, 將重試最多' + str(
                    max_try_num) + '次, 收到異常: ' + str(e)
                err_print(self._sn, '上傳狀態', detail, status=1)
            try_counter = try_counter + 1
        except socket.timeout as e:
            if self._settings['ftp']['show_error_detail']:
                detail = self._video_filename + ' 在上傳過程socket超時, 將重試最多' + str(
                    max_try_num) + '次, 收到異常: ' + str(e)
                err_print(self._sn, '上傳狀態', detail, status=1)
            try_counter = try_counter + 1

    if not self.upload_succeed_flag:
        err_print(self._sn, '上傳失敗', self._video_filename + ' 放棄上傳!', status=1)
        exit_ftp()
        return self.upload_succeed_flag

    err_print(self._sn, '上傳完成', self._video_filename, status=2)
    exit_ftp()  # log out of FTP
    return self.upload_succeed_flag
def load_date(self, date_str):
    """Load Wind spacecraft spin-rate (spha) data for *date_str*
    ("YYYY-MM-DD").

    Downloads the daily CDF from SPDF over FTPS if no local copy exists,
    loads it with pycdf, and appends its records to the instance lists.
    On any failure a 'fail' message is emitted and the method returns
    without raising.
    """
    # Abort if the requested date has already been loaded.
    if (date_str in self.arr_date):
        return

    # Extract the year, month, and day of the requested date.
    year = int(date_str[0:4])
    mon = int(date_str[5:7])
    day = int(date_str[8:10])

    # Build the name pattern of the file containing the requested date
    # ('v??' matches any version number).
    str_year = date_str[0:4]
    str_mon = date_str[5:7]
    str_day = date_str[8:10]
    fl0 = 'wi_k0_spha_' + str_year + str_mon + str_day + '_v??.cdf'
    fl0_path = os.path.join(self.path, fl0)

    # Search the local data directory; download on a miss.
    gb = glob(fl0_path)
    if (len(gb) > 0):
        fl_path = gb[-1]
    else:
        try:
            self.mesg_txt('ftp', date_str)
            ftp = FTP_TLS('spdf.gsfc.nasa.gov')
            ftp.login()
            ftp.cwd('pub/data/wind/orbit/spha_k0')
            ftp.cwd(str_year)
            ls = ftp.nlst(fl0)
            fl = ls[-1]  # last listing entry == highest version
            fl_path = os.path.join(self.path, fl)
            # Close the local file even if the transfer fails (the
            # original passed an unclosed handle's write method).
            with open(fl_path, 'wb') as f:
                ftp.retrbinary("RETR " + fl, f.write)
            ftp.quit()  # original never closed the connection
        # Exception (not a bare except) so Ctrl-C still propagates.
        except Exception:
            self.mesg_txt('fail', date_str)
            return

    # If the file now exists locally, try to load it; otherwise, abort.
    self.mesg_txt('load', date_str)
    if (os.path.isfile(fl_path)):
        try:
            cdf = pycdf.CDF(fl_path)
        except Exception:
            self.mesg_txt('fail', date_str)
            return
    else:
        self.mesg_txt('fail', date_str)
        return

    # Append the requested date to the list of dates loaded.
    ind = len(self.arr_date)
    self.arr_date.append(date_str)

    # Extract the data from the loaded file.
    self.arr_spin_t += list(cdf['Epoch'][:])
    self.arr_spin_w += list(cdf['AVG_SPIN_RATE'][:])
    self.arr_spin_ind += [ind for ep in cdf['Epoch']]

    # Request a clean-up of the files in the data directory.
    self.cleanup_file()