def download_npc(): ftp_files = [] os_files = [] try: ftps = FTP_TLS() ftps.connect(CFG_FTPS_HOST, CFG_FTPS_PORT) log_it( 'connected to ' + CFG_FTPS_HOST + ' welcome message: ' + str(ftps.getwelcome()), 'info') ftps.login(CFG_FTPS_USER, CFG_FTPS_PASS) ftps.prot_p() log_it('changing dir: ' + CFG_FTPS_DIR, 'info') ftps.cwd(CFG_FTPS_DIR) ftp_files = ftps.nlst() for f in ftp_files: if not os.path.isfile(CFG_ARCHIVE_DIR + f): ftps.retrbinary('RETR ' + f, open(CFG_ARCHIVE_DIR + f, 'wb').write) log_it('downloading file ' + f, 'info') else: log_it( 'skipping ' + f + ' as it already exists in ' + CFG_ARCHIVE_DIR, 'debug') except ftplib.all_errors, e: log_it('unable to connect to ' + CFG_FTPS_HOST + ' %s' % e, 'error')
def copy_ftp_file(project_id, client_name, file_name):
    """Copy `file_name` from the client's FTP folder into the local project
    input folder as <project_id>.<extension>.

    Any existing local file with the same target name is removed first.
    """
    ftp = FTP_TLS(settings.FTP_LOCATION)
    # raw USER/PASS handshake (mirrors the original control-channel sequence)
    ftp.sendcmd("USER {}".format(settings.FTP_USER))
    ftp.sendcmd("PASS {}".format(settings.FTP_PASS))
    comp_logger.info(
        'Initiating ftp file transfer for file {} for client {}'.format(
            file_name, client_name))
    ftp.cwd(client_name)
    # create project input dir
    project_dir = os.path.join(settings.PROJECT_DIR, str(project_id))
    helper.create_dir(project_dir)
    # copy remote ftp file to local project folder, named <project_id>.<ext>
    file_format = file_name.split('.')[-1]
    local_filename = os.path.join(settings.PROJECT_INPUT_FOLDER,
                                  '{}.{}'.format(project_id, file_format))
    if os.path.exists(local_filename):
        os.remove(local_filename)
    # `with` guarantees the handle is closed even if the transfer raises
    # (the original leaked it on error)
    with open(local_filename, "wb") as lf:
        ftp.retrbinary("RETR " + file_name, lf.write, 8 * 1024)
    comp_logger.info('Completed Copying file {} for client {}'.format(
        file_name, client_name))
def downloadSarZIP(value, ftpParams):
    """Fetch every '<value>_*' file from the FTP server into the current
    directory, then delete the remote copies.

    Returns the list of file names that matched and were downloaded.
    """
    ftps = FTP_TLS()
    ftps.connect(ftpParams.host)
    # authenticate with raw control-channel commands
    ftps.sendcmd('USER ' + ftpParams.user)
    ftps.sendcmd('PASS ' + ftpParams.password)

    remote_names = ftps.nlst()
    pattern = value + "_*"
    dest_dir = "./"

    # download every matching file
    matched = []
    for entry in remote_names:
        if fnmatch.fnmatch(entry, pattern):
            matched.append(entry)
            with open(os.path.join(dest_dir, entry), "wb") as sink:
                ftps.retrbinary("RETR {}".format(entry), sink.write)

    # remove the server-side copies of everything we just pulled down
    for entry in remote_names:
        if fnmatch.fnmatch(entry, pattern):
            ftps.delete(entry)

    ftps.quit()
    return matched
def download(filepath):
    """Download `filepath` from the FTP server, write a success marker to
    C:/hog/Downloads, then forward that marker via sftp().

    All errors are swallowed by design (best-effort); see NOTE below.
    """
    try:
        ftp = FTP_TLS(theserver)
        ftp.login(userper, thepast)
        # Derive the bare file name and its directory from the supplied path
        filename = os.path.basename(filepath)
        path = filepath.replace(filename, '')
        filenametokeep = filename  # keep original filename for the remote lookup
        local_filename = os.path.join(r'' + path + filename)
        downloadfile = filenametokeep
        local_path = local_filename
        # cc.xml lives at the server root; everything else under `thename`
        if 'cc.xml' not in filenametokeep:
            remote_path = thename + downloadfile
        else:
            remote_path = downloadfile
        # `with` closes the local file even on a failed transfer
        # (the original leaked the handle on error)
        with open(local_filename, "wb") as lf:
            ftp.retrbinary("RETR " + remote_path, lf.write, 8 * 1024)
        ftp.close()
        # write the success marker and forward it
        with open('C:/hog/Downloads', 'w+') as f:
            f.write('Download of ' + filename + ' Successfull')
        sftp('Downloads', 'C:/hog/Downloads')
    except Exception as e:
        # NOTE(review): failures are deliberately silenced; the exception is
        # kept in a local only so a debugger can inspect it.
        ohno = e
def ftpDownload(filename, system): from ftplib import FTP_TLS import os ftps = FTP_TLS() ftps.connect('pwcrack.init6.me', '21') ftps.auth() ftps.login('DC214', 'passwordcrackingcontest') ftps.prot_p() ftps.set_pasv(True) local_filename = filename with open(local_filename, 'wb') as f: def callback(data): f.write(data) ftps.retrbinary('RETR %s' % filename, callback) f.close() file_extension = str(filename.split('.')[1]) if file_extension == '7z': status = decompressit(local_filename, system) if status: print "file %s hash been downloaded." % local_filename return True
def check_ftps(hostname, temp_name, username, password, verbose):
    """Probe an FTPS server by uploading, downloading and deleting a test file.

    Returns the list of failed sub-checks (subset of
    ['ftps_upload', 'ftps_download', 'ftps_delete']).
    """
    ftps_services_failed = []
    if verbose:
        print("-" * 60)
    if verbose:
        print(temp_name)
    ftps = FTP_TLS(hostname)
    ftps.login(username, password)
    ftps.prot_p()  # secure the data channel
    if verbose:
        ftps.set_debuglevel(2)
    # Upload the probe file
    if verbose:
        print("FTPS: Uploading the file.")
    try:
        # `with` closes the source file (the original leaked the handle)
        with open(temp_name, 'rb') as src:
            ftps.storbinary('STOR {0}'.format('ftps.txt'), src)
    except:  # any failure at all counts as a failed check (by design)
        if verbose:
            print("FTPS: Uploading the file failed.")
        ftps_services_failed.append('ftps_upload')
    else:
        if verbose:
            print("FTPS: Uploaded file successfully.")
    # Download the file back
    if verbose:
        print("FTPS: Downloading the file.")
    try:
        # the original never closed this handle
        with open('/tmp/ftps.txt', 'wb') as myfile:
            ftps.retrbinary('RETR {0}'.format('ftps.txt'), myfile.write)
    except:
        if verbose:
            print("FTPS: Downloading the uploaded file failed.")
        ftps_services_failed.append('ftps_download')
    else:
        if verbose:
            print("FTPS: Downloaded the uploaded file successfully.")
    # Delete the file from the remote system
    try:
        ftps.delete('ftps.txt')
    except:
        if verbose:
            print("FTPS: Deleting uploaded file failed.")
        ftps_services_failed.append('ftps_delete')
    else:
        if verbose:
            print("FTPS: Deleted the uploaded file successfully.")
    # Close the ftps connection.
    ftps.close()
    # Delete the locally downloaded copy
    delete_temp_file('/tmp/ftps.txt', verbose)
    return ftps_services_failed
def get_ftp_data(self, cr, uid, ids, context=None):
    """Pull every CSV file from each chain's FTP(S) server, import it into
    the database, then delete the remote copy.

    Tries FTPS (explicit TLS) first and falls back to plain FTP when the
    server refuses the secure handshake.  Returns True (OpenERP convention).
    """
    if context is None:  # avoid the shared-mutable-default pitfall
        context = {}
    for chain in self.browse(cr, uid, ids, context=context):
        config_obj = chain.ftp_config_id
        try:
            conn = FTP_TLS(host=config_obj.host, user=config_obj.username,
                           passwd=config_obj.passwd)
            conn.prot_p()
        except Exception:
            # server does not speak TLS -- fall back to plain FTP
            conn = FTP(host=config_obj.host, user=config_obj.username,
                       passwd=config_obj.passwd)
        filenames = conn.nlst()
        for filename in filenames:
            input_file = StringIO()
            # bound method works directly as the retrbinary callback
            conn.retrbinary('RETR %s' % filename, input_file.write)
            input_string = input_file.getvalue()
            input_file.close()
            csv_reader = unicode_csv_reader(
                StringIO(input_string),
                delimiter=str(chain.separator),
                quoting=(not chain.delimiter and csv.QUOTE_NONE) or csv.QUOTE_MINIMAL,
                quotechar=chain.delimiter and str(chain.delimiter) or None,
                charset=chain.charset)
            self.import_to_db(cr, uid, ids, csv_reader=csv_reader,
                              context=context)
            conn.delete(filename)
        conn.quit()
    return True
def sync(self): """ downloads all needed_files from self.hostname (FTP) of the downloaded files, extracts .gz files to same local_working_dir -using self.extract function parses the .txt downloaded needed_files -using the self.parse function """ ftps = FTP_TLS(self.hostname) # connect to host, default port ftps.login(self.username, self.password) ftps.prot_p() ftps.cwd(self.remote_dir) # change into "logs" directory ftps.retrlines('LIST *.gz *.txt', self.ftp_list_callback) # list directory contents for needed_file in self.needed_files: if self.logging: print "Writing {0} to {1}...".format(needed_file, self.local_working_dir) ftps.retrbinary("RETR " + needed_file, open(os.path.join(self.local_working_dir, needed_file), 'wb').write) if self.logging: print "done syncing files" for needed_file in self.needed_files: if needed_file.endswith(".gz"): self.extract(os.path.join(self.local_working_dir, needed_file)) txt_file_name = needed_file.replace('.gz','')#if already a .txt file, this is unnceccessary but works. self.parse(txt_file_name) if self.logging: print "done extracting/parsing .gz files" ftps.quit()
class ftpAccess():
    """Thin wrapper around an FTP_TLS connection for fetching files and
    directory listings."""

    def __init__(self, url):
        self.ftp = FTP_TLS(url)
        # login anonymously before securing control channel
        # NOTE(review): `secure=` is not an argument of the standard
        # ftplib FTP_TLS.login(); this presumably targets a patched or
        # vendored FTP_TLS -- confirm before relying on it.
        self.ftp.login(
            "anonymous", "anonymous",
            secure=False)

    def GetFtpFile(self, InPath, filename, downloaddir):
        # Download one file from `InPath` into `downloaddir` (created if
        # missing).  NOTE(review): the open().write handle is never closed.
        outfile = downloaddir + filename
        ensure_dir(downloaddir)
        self.ftp.cwd(InPath)
        self.ftp.retrbinary("RETR " + filename, open(outfile, 'wb').write)

    def GetFileListFtp(self, pathname):
        # List `pathname`, returning [filename, size] pairs for every file
        # whose size can be queried (others are printed and skipped).
        self.ftp.cwd(pathname)
        ret = list()
        out = self.ftp.retrlines('LIST', addline)  # list directory content securely
        # search json files
        # NOTE(review): `entry` and `filenamelist` are not defined in this
        # scope -- this method appears to depend on globals populated by
        # `addline` (defined elsewhere in the module); verify before use.
        temp = entry.split(' ')
        for filename in filenamelist:
            try:
                a = self.ftp.size(filename)
                print("{} - {}".format(filename, a))
                ret.append([filename, a])
            except:
                print("{} - xxx".format(filename))
                pass
        return ret
def download_file_tls(host, user, password, local_path, remote_path, timeout=None):
    """Download `remote_path` from an FTPS server into `local_path`.

    :param host: FTP server hostname
    :param user: login user name
    :param password: login password
    :param local_path: destination file path on the local machine
    :param remote_path: file path on the server
    :param timeout: optional socket timeout in seconds
    """
    ftp = FTP_TLS(host=host, user=user, passwd=password, timeout=timeout)
    ftp.prot_p()  # secure the data channel
    # close the local file deterministically (the original leaked the handle)
    with open(local_path, 'wb') as fh:
        ftp.retrbinary('RETR {}'.format(remote_path), fh.write)
    ftp.quit()
def fetch_data_via_ftp(ftp_config, local_directory):
    """
    Get benchmarking data from a remote ftp server.

    :type ftp_config: config.FTPConfigurationRepresentation
    :type local_directory: str
    """
    if not ftp_config.enabled:
        return
    # Make sure the local destination tree exists
    create_directory_tree(local_directory)
    # Open the connection, securing the data channel when TLS is requested
    if ftp_config.use_tls:
        ftp = FTP_TLS(ftp_config.server)
        ftp.login(ftp_config.username, ftp_config.password)
        ftp.prot_p()  # Request secure data connection for file retrieval
    else:
        ftp = FTP(ftp_config.server)
        ftp.login(ftp_config.username, ftp_config.password)
    if not ftp_config.files:
        # No explicit file list: mirror the whole remote directory
        fetch_data_via_ftp_recursive(ftp=ftp,
                                     local_directory=local_directory,
                                     remote_directory=ftp_config.directory)
    else:
        ftp.cwd(ftp_config.directory)
        total = len(ftp_config.files)
        for counter, remote_filename in enumerate(ftp_config.files, start=1):
            local_filename = remote_filename
            filepath = os.path.join(local_directory, local_filename)
            if os.path.exists(filepath):
                print(
                    "[Setup][FTP] ({}/{}) File already exists. Skipping: {}"
                    .format(counter, total, filepath))
                continue
            with open(filepath, "wb") as local_file:
                try:
                    ftp.retrbinary('RETR %s' % remote_filename,
                                   local_file.write)
                    print("[Setup][FTP] ({}/{}) File downloaded: {}".
                          format(counter, total, filepath))
                except error_perm:
                    # Error downloading file: report and drop the empty local copy
                    print(
                        "[Setup][FTP] ({}/{}) Error downloading file. Skipping: {}"
                        .format(counter, total, filepath))
                    local_file.close()
                    os.remove(filepath)
    # Close FTP connection
    ftp.close()
def get_file(filename):
    """Stream `filename` from Box over FTPS and return its raw contents.

    The download is accumulated in memory (never written to disk) so the
    bytes can be handed straight to the browser.  Chunks are collected in a
    list and joined once -- the original concatenated with `+=`, which is
    quadratic and breaks on Python 3 where retrbinary delivers bytes.
    """
    chunks = []
    session = FTP_TLS('ftp.box.com', box_username, box_password)  # open Box
    session.retrbinary('RETR recordings/' + filename, chunks.append)
    session.close()  # close Box
    return b"".join(chunks)  # reassemble the data for the browser
def get_latest_file():
    """Download the newest incremental and FULL .txt exports from the ADESA
    FTP when they differ from the ones recorded in filenames.json.

    Returns (inc_filename, full_filename, flag) where flag == 1 means at
    least one new file was downloaded (and filenames.json was updated).
    """
    with open('filenames.json') as json_file:
        files = json.load(json_file)
    flag = 0
    # NOTE(review): credentials are hard-coded; move them to configuration
    # or environment variables.
    ftp = FTP_TLS(host='olftp.adesa.com', user='******', passwd='aU)kj7Qn8')
    ftp.prot_p()
    ftp.cwd('outbound/')
    file_list = []
    ftp.retrlines('MLSD', file_list.append)
    max_value = 0
    full_max = 0
    filename = ''
    full_file = ''
    # Pick the .txt entries with the largest embedded number; the FULL
    # export is tracked separately from the incremental one.
    for i in file_list:
        col = i.split(';')
        col_max = int(re.search(r'\d+', col[0]).group())
        if (col_max > max_value) & ('.txt' in col[-1]):
            max_value = col_max
            filename = col[-1].replace(' ', '')
        if (col_max > full_max) & ('.txt' in col[-1]) & ('FULL' in col[-1]):
            full_max = col_max
            full_file = col[-1].replace(' ', '')
    if (filename != files['inc_file']):
        # `with` closes the file even if the transfer raises
        with open(filename, 'wb') as localfile:
            ftp.retrbinary('RETR ' + filename, localfile.write, 1024)
        print("Inc file data tranfer complete")
        flag = 1
    else:
        print("Inc already there")
    if (full_file != files['full_file']):
        with open(full_file, 'wb') as localfile:
            ftp.retrbinary('RETR ' + full_file, localfile.write, 1024)
        print("Full file data tranfer complete")
        flag = 1
    else:
        print("Full already there")
    if flag == 1:
        new_names = {'full_file': full_file, 'inc_file': filename}
        with open('filenames.json', 'w') as outfile:
            json.dump(new_names, outfile)
    ftp.quit()
    return filename, full_file, flag
def fetch_temporary_gz_file(self, temporary_gz_file_path):
    """Download the .gz file named by `temporary_gz_file_path` from the
    configured FTPS host into that same local path, retrying on failure.
    """
    with open(temporary_gz_file_path, 'wb') as file_ftp:
        file_name = ntpath.basename(temporary_gz_file_path)
        try:
            ftp = FTP_TLS(self.ftp_host, self.ftp_username, self.ftp_password)
            ftp.cwd(self.ftp_path)
            ftp.retrbinary('RETR ' + file_name, file_ftp.write)
            ftp.quit()
        except Exception:
            # narrowed from a bare except so Ctrl-C / SystemExit still work
            Logger.file_parse(self.chain, file_name)
            # NOTE(review): unbounded recursive retry -- a persistent failure
            # recurses forever, each level holding an open file handle;
            # consider a retry cap.
            self.fetch_temporary_gz_file(temporary_gz_file_path)
def corrReportDL(exp_id, vgos_tag):
    """Download the correlator report for `exp_id` from the CDDIS vgosDB
    archive and extract it to <dirname>/corr_files/<exp_id>.corr.

    `vgos_tag` encodes the session date (first two characters form the
    2-digit year).  If the .corr file already exists locally, nothing is
    downloaded.  Errors are reported on stdout; nothing is returned.
    """
    year = '20' + str(vgos_tag[0:2])
    tag = str(vgos_tag.rstrip())
    exp_id = str(exp_id)
    vgos_exists = []  # filled by the LIST callback when the .tgz exists
    if os.path.isfile(dirname + "/corr_files/" + exp_id + '.corr'):
        print("Corr report already exists for experiment " + exp_id +
              ", skipping re-download.")
        return
    else:
        # anonymous FTPS login to the NASA CDDIS archive
        ftps = FTP_TLS(host='gdc.cddis.eosdis.nasa.gov')
        ftps.login()
        ftps.prot_p()
        try:
            # probe for the session archive; any listing line means it exists
            ftps.retrlines(
                "LIST /pub/vlbi/ivsdata/vgosdb/" + year + "/" + tag + ".tgz",
                vgos_exists.append)
            if len(vgos_exists) > 0:
                local_filename = os.path.join(dirname, tag + ".tgz")
                ftps.sendcmd('TYPE I')  # binary transfer mode
                # NOTE(review): lf is not closed if retrbinary raises
                lf = open(local_filename, "wb")
                ftps.retrbinary(
                    "RETR /pub/vlbi/ivsdata/vgosdb/" + year + "/" + tag +
                    ".tgz", lf.write)
                lf.close()
                tar = tarfile.open(dirname + '/' + tag + ".tgz")
                # preferred member: the canonical V000 kMk4 history file
                if tag + '/History/' + tag + '_V000_kMk4.hist' in tar.getnames(
                ):
                    member = tar.getmember(tag + '/History/' + tag +
                                           '_V000_kMk4.hist')
                    # rename on extract so it lands directly as the .corr file
                    member.name = dirname + '/corr_files/' + exp_id + '.corr'
                    tar.extract(member)
                    tar.close()
                else:
                    # fall back to the first V###.hist member found
                    file_list = tar.getnames()
                    regex = re.compile('.*V...\.hist')
                    for file in file_list:
                        if re.match(regex, file):
                            member = tar.getmember(file)
                            member.name = dirname + '/corr_files/' + exp_id + '.corr'
                            tar.extract(member)
                            tar.close()
                            break
                    # NOTE(review): if no member matches, `tar` is never closed
                os.remove(dirname + '/' + tag + ".tgz")
                print("Corr report download complete for experiment " +
                      exp_id + ".")
                return
        except Exception:
            print("Corr report not available for experiment " + exp_id + ".")
            return
def get_jobs_data():
    """Mirror any missing files from the 211 FTP share into 211_data/.

    Returns the string 'Done' (historical interface).
    """
    ftp = FTP_TLS('ontario.files.com', timeout=10)
    ftp.login(user=os.environ['211_username'], passwd=os.environ['211_password'])
    ftp.cwd('/211projects/BensTeam')
    ftp.prot_p()  # secure the data channel before transfers
    files = ftp.nlst()
    for filename in files:
        if not os.path.isfile('211_data/' + filename):
            print(f"Getting file (unknown)")
            # `with` closes the local file (the original leaked one handle
            # per downloaded file)
            with open('211_data/' + filename, 'wb') as out:
                ftp.retrbinary("RETR " + filename, out.write)
    ftp.quit()
    return 'Done'
def download_ftps(url, username, password, proxy=None):
    """Download a file given an ftps:// or ftpes:// URL into the current
    directory (named after the URL's last path component).

    On any failure, a partial local file is removed and the original
    exception is re-raised.
    """
    filename = url.split('/')[-1]
    host = url.split('/')[2]
    path = url.replace('ftpes://', '').replace('ftps://', '').replace(host, '')
    try:
        ftps = FTP_TLS(host)
        ftps.login(username, password)
        ftps.prot_p()  # secure the data channel
        with open(filename, 'wb') as f:
            ftps.retrbinary('RETR ' + path, f.write)
        ftps.close()
    except Exception:
        # Only remove the file if it was actually created -- the original
        # called os.remove unconditionally, which raised FileNotFoundError
        # (masking the real error) when login failed before the open().
        if os.path.exists(filename):
            os.remove(filename)
        raise  # bare raise preserves the original traceback
def ftpDownload(filename):
    """Fetch `filename` from the password-cracking contest FTPS server into
    the current directory under the same name."""
    from ftplib import FTP_TLS
    import os
    connection = FTP_TLS()
    connection.connect('pwcrack.init6.me', '21')
    connection.auth()
    connection.login('DC214', 'passwordcrackingcontest')
    connection.prot_p()  # secure the data channel
    connection.set_pasv(True)
    destination = filename
    # write each received chunk straight to the local file
    with open(destination, 'wb') as sink:
        connection.retrbinary('RETR %s' % filename, sink.write)
def ftp_files(domain, remote_paths, local_paths, direction, secure=True):
    """Transfer files between `local_paths` and `remote_paths` over FTP(S).

    :param direction: 'up' uploads local->remote, 'down' downloads
        remote->local; anything else raises Exception.
    :param secure: use FTPS with a protected data channel when True.
    """
    ftp = FTP_TLS(domain) if secure else FTP(domain)
    ftp.login(prompt_usr(), prompt_pw())
    if secure:
        ftp.prot_p()
    for remote_path, local_path in zip(remote_paths, local_paths):
        # `with` closes each local file -- the original leaked one open
        # handle per transferred file in both directions
        if direction.lower() == 'up':
            with open(local_path, 'rb') as src:
                ftp.storbinary('STOR ' + remote_path, src)
        elif direction.lower() == 'down':
            with open(local_path, 'wb') as dst:
                ftp.retrbinary('RETR ' + remote_path, dst.write)
        else:
            raise Exception('Invalid direction: ' + direction)
    ftp.quit()
def get_txtfile():
    """Download SummaryExport.txt from the Cook County clerk FTPS server
    into scrapers/cook_files/updated_cook.txt (created/overwritten)."""
    conn = FTP_TLS("ftps.cookcountyclerk.com")
    conn.login(user='******', passwd='R3p047')
    conn.prot_p()  # secure the data channel
    conn.getwelcome()
    conn.dir()
    print('getting new txt file')
    # write the export into the local results file
    with open('scrapers/cook_files/updated_cook.txt', 'wb') as new_results:
        conn.retrbinary('RETR ' + 'SummaryExport.txt', new_results.write)
    print('exiting server')
    conn.quit()
def main():
    """Download otpo.csv from the Concur intelligence extract share to the
    local desktop."""
    ftp = FTP_TLS()
    ftp.connect(host=FTP_SERVER_HOST)
    ftp.login(user=USERNAME, passwd=PASSWORD)
    ftp.dir()
    ftp.cwd('/ebipmft-concurintelligenceextract-vd')
    ftp.dir()
    print(ftp.getwelcome())
    file_remote = r'otpo.csv'
    file_local = r'C:\users\eweitia\desktop\otpo.csv'
    bufsize = 1024  # transfer buffer size
    # `with` closes the local file even if the transfer fails
    # (the original never closed fp)
    with open(file_local, 'wb') as fp:
        ftp.retrbinary('RETR %s' % file_remote, fp.write, bufsize)
    ftp.quit()
    ftp.close()
def _download_worker_cddis(url, filename): """ Download the URL from gdc.cddis.eosdis.nasa.gov via FTP-SSL and save it to a file. """ # Attempt to download the data print("Downloading %s" % url) ## Login ftps = FTP_TLS("gdc.cddis.eosdis.nasa.gov", timeout=DOWN_CONFIG.get('timeout')) status = ftps.login("anonymous", "*****@*****.**") if not status.startswith("230"): ftps.close() return False ## Secure status = ftps.prot_p() if not status.startswith("200"): ftps.close() return False ## Download remote_path = url.split("gdc.cddis.eosdis.nasa.gov", 1)[1] try: remote_size = ftps.size(remote_path) except FTP_ERROR: ftps.close() return False with _CACHE_DIR.open(filename, 'wb') as fh: pbar = DownloadBar(max=remote_size) def write(data): fh.write(data) pbar.inc(len(data)) sys.stdout.write(pbar.show() + '\r') sys.stdout.flush() status = ftps.retrbinary('RETR %s' % remote_path, write, blocksize=DOWN_CONFIG.get('block_size')) sys.stdout.write(pbar.show() + '\n') sys.stdout.flush() if not status.startswith("226"): _CACHE_DIR.remove(filename) ftps.close() return False ## Further processing, if needed if os.path.splitext(filename)[1] == '.Z': ## Save it to a regular gzip'd file after uncompressing it. _convert_to_gzip(filename) # Done ftps.close() return True
def FTP_DOWNLOAD(self):
    """Connect to companys's FTP and download the zip folder within"""
    password = '******'
    directory = '/folder/'
    ftps = FTP_TLS('123.456.789.000')
    ftps.login(user='******', passwd=password)
    ftps.prot_p()  # secure the data channel
    cur_dir = ftps.cwd(directory)
    new_files = ftps.nlst()
    print(ftps.pwd())
    for items in new_files:
        # only the expected client prefix and .zip archives
        if items.startswith('987654321') and items.endswith('.zip'):
            print(items)
            local_file = os.path.join(chksrc, items)
            # `with` closes the handle even if retrbinary raises
            # (the original closed it manually, leaking on error)
            with open(local_file, 'wb') as f:
                ftps.retrbinary('RETR %s' % items, f.write)
    return True
def getfiles(server, port, user, password, db): sqliteconnection = sqlite3.connect(db) sqlitecursor = sqliteconnection.cursor() sqlitecursor.execute('''CREATE TABLE IF NOT EXISTS latest (date int, CONSTRAINT 'id_UNIQUE' UNIQUE ('date'))''') sqliteconnection.commit() sqlitecursor.execute('''SELECT date FROM files WHERE date = (SELECT MAX(date) FROM files) LIMIT 1''') latestfile = sqlitecursor.fetchone() sqlitecursor.execute('''SELECT date FROM latest WHERE date = (SELECT MAX(date) FROM latest) LIMIT 1''') latestfetch = sqlitecursor.fetchone() if latestfetch is None: latestfetch = 0 if latestfetch < latestfile: ftpsconnection = FTP_TLS() ftpsconnection.connect(server, port) ftpsconnection.auth() ftpsconnection.prot_p() ftpsconnection.login(user, password) ftpsconnection.prot_p() sqlitecursor.execute('''SELECT name FROM files WHERE date > %d''' % latestfetch) filestofetch = sqlitecursor.fetchall() for currfile in filestofetch: ftpsconnection.cwd(currfile[0]) filenames = ftpsconnection.nlst() for filename in filenames: print 'Now saving /mnt/folder' + currfile[0] + '/' + filename localfile = open('/mnt/folder' + currfile + '/' + filename, 'wb') ftpsconnection.retrbinary('RETR ' + filename, localfile.write) localfile.close() sqliteconnection.execute('''INSERT OR IGNORE INTO latest VALUES (%d)''' % time.time()) sqliteconnection.commit() sqliteconnection.close() ftpsconnection.quit() ftpsconnection.close()
def get_ftp_data(self, cr, uid, ids, context=None):
    """Pull every CSV file from each chain's FTP(S) server, import it into
    the database, then delete the remote copy.

    Tries FTPS (explicit TLS) first and falls back to plain FTP when the
    server refuses the secure handshake.  Returns True (OpenERP convention).
    """
    if context is None:  # avoid the shared-mutable-default pitfall
        context = {}
    for chain in self.browse(cr, uid, ids, context=context):
        config_obj = chain.ftp_config_id
        try:
            conn = FTP_TLS(host=config_obj.host, user=config_obj.username,
                           passwd=config_obj.passwd)
            conn.prot_p()
        except Exception:
            # server does not speak TLS -- fall back to plain FTP
            conn = FTP(host=config_obj.host, user=config_obj.username,
                       passwd=config_obj.passwd)
        filenames = conn.nlst()
        for filename in filenames:
            input_file = StringIO()
            # bound method works directly as the retrbinary callback
            conn.retrbinary('RETR %s' % filename, input_file.write)
            input_string = input_file.getvalue()
            input_file.close()
            csv_reader = unicode_csv_reader(
                StringIO(input_string),
                delimiter=str(chain.separator),
                quoting=(not chain.delimiter and csv.QUOTE_NONE) or csv.QUOTE_MINIMAL,
                quotechar=chain.delimiter and str(chain.delimiter) or None,
                charset=chain.charset)
            self.import_to_db(cr, uid, ids, csv_reader=csv_reader,
                              context=context)
            conn.delete(filename)
        conn.quit()
    return True
def get_confidential_211_referrals():
    """Download any new 211 'Referrals' CSV exports into the confidential
    data directory; files already on disk are skipped.

    Returns True (historical interface).
    """
    item = {'classification': 'confidential', 'source_name': '211',
            'table_name': 'referrals', 'type': 'csv'}
    file_path, save_dir = get_file_path(item)
    Path(save_dir).mkdir(parents=True, exist_ok=True)
    ftp = FTP_TLS('ontario.files.com', timeout=10)
    ftp.login(user=os.environ['USERNAME_211'], passwd=os.environ['PASSWORD_211'])
    ftp.cwd('/211projects/BensTeam')
    ftp.prot_p()  # secure the data channel before transfers
    files = ftp.nlst()
    for filename in files:
        names = filename.split('-')
        if not 'Referrals' in names:
            continue
        # the export date is encoded after '-Created-' in the name
        date = filename.split('-Created-')[-1]
        file_path, save_dir = get_file_path(item, date)
        if not os.path.isfile(file_path):
            print(f"Getting file (unknown)")
            # `with` closes the local file (the original leaked one handle
            # per downloaded file)
            with open(file_path, 'wb') as out:
                ftp.retrbinary("RETR " + filename, out.write)
    ftp.quit()
    return True
def lambda_handler(event, context):
    """AWS Lambda entry point: stream one file from FTPS straight into an
    SSE-KMS-encrypted S3 object via s3fs."""
    ExtraArgs = {
        'ServerSideEncryption': 'aws:kms',
        'SSEKMSKeyId': 'alias/<alias_name>'
    }
    s3 = s3fs.S3FileSystem(anon=False, s3_additional_kwargs=ExtraArgs)
    ftp_url = "local"  # provide FTP host
    ftp_path = "/test_folder/"  # provide FTP path
    s3Bucket = "s3-bucket"  # provide s3 bucket name
    file_name = "sample.txt"  # provide file name
    ftps = FTP_TLS(ftp_url)
    ftps.login('<user_name>', '<pwd>')
    ftps.prot_p()  # secure the data channel
    logger.info('Login Successful')
    ftps.cwd(ftp_path)
    logger.info('Downloading file: ' + file_name)
    # close/flush the S3 file object -- the original never closed it, so the
    # buffered s3fs upload may never have been finalized
    with s3.open("{}/{}".format(s3Bucket, file_name), 'wb') as s3_obj:
        ftps.retrbinary('RETR ' + file_name, s3_obj.write)
    logger.info('Download completed: ' + file_name)
def download(downloaded, user, passwd, all_files=False, filename=None): # Connect to the MAPS ftp server over FTPS ftps = FTP_TLS('ftps.tsi.telecom-paristech.fr') print 'Connected to MAPS FTP over TLS.' try: ftps.login(user=user, passwd=passwd) ftps.cwd('maps') except error_perm: print "Incorrect username/password" ; quit ftps.retrlines('LIST *.zip', get_file_list) if filename is not None: if not in_downloads(files, filename): print 'File not found' ; return print 'Downloading', filename res = ftps.retrbinary('RETR '+filename, open('./downloads/'+filename, 'wb').write) ftps.close() return [(filename, 0)] if len(files) == len(downloaded): print "All MAPS files downloaded. Continuing." return if all_files: for f, s in files: if not in_downloads(downloaded, f): print "Downloading", f, "of size", s, "bytes" res = ftps.retrbinary('RETR '+f, open('./downloads/'+f, 'wb').write) elif filename is None: f, s = random.choice(files) while in_downloads(downloaded, f): f, s = random.choice(files) print "Downloading", f, "of size", s, "bytes" res = ftps.retrbinary('RETR '+f, open('./downloads/'+f, 'wb').write) ftps.close() if all_files: return files return [(f, s)]
def _DownloadFTPFile(fname):
    '''
    Downloads a file from an FTP site, returns the full path of the local
    version of that file.

    Inputs:
        fname: file name e.g. file.txt

    Returns:
        full path to downloaded file
    '''
    # login to the FTP server
    ftp = FTP_TLS(Globals.ftpbase)
    ftp.login()
    ftp.cwd(Globals.ftpdir)
    # reset the module-level progress counter used by the callback
    global progress
    progress = 0
    out_path = Globals.DataPath + 'tmp/' + fname
    # `with` closes the output file even if the transfer raises
    # (the original only closed it on success)
    with open(out_path, "wb") as f:
        # callback writes chunks to f and reports progress
        cb = _GetCallback(f, ftp, fname)
        # download binary file using ftplib
        print('Downloading: {:s}'.format(fname))
        ftp.retrbinary('RETR ' + fname, cb)
        print()
    # close FTP connection
    ftp.close()
    # return the local file name
    return out_path
def exploreAndProcessFTPFolder(folderToExplore):
    """Recursively walk an INPI open-data FTP folder, downloading and
    processing every .zip file found (reconnecting per file); .md5 files
    are ignored and other entries are recursed into."""
    ftp = FTP_TLS('opendata-rncs.inpi.fr')
    ftp.login(user='******', passwd='xxx')
    ftp.prot_p()
    elementList = ftp.nlst(folderToExplore)
    ftp.quit()
    for element in elementList:
        if element.endswith(".zip"):
            localFileName = os.path.basename(element)
            print("Downloading and processing file " + element)
            ftp = FTP_TLS('opendata-rncs.inpi.fr')
            ftp.login(user='******', passwd='xxx')
            ftp.prot_p()
            # BUG FIX: close (and flush) the local file BEFORE handing it to
            # the processor -- the original kept it open, so the processor
            # could read a truncated/unflushed archive.
            with open(localFileName, 'wb') as localfile:
                ftp.retrbinary("RETR " + element, localfile.write)
            ftp.quit()
            processOneDayZippedFile(localFileName)
        elif element.endswith(".md5"):
            print("md5 file to ignore ^^ :" + element)
        else:
            print("exploring " + element)
            exploreAndProcessFTPFolder(element)
def lambda_handler(event, context):
    """AWS Lambda entry point: fetch FTP credentials from Secrets Manager and
    stream one file from FTPS into an SSE-KMS-encrypted S3 object via s3fs."""
    ExtraArgs = {'ServerSideEncryption': 'aws:kms', 'SSEKMSKeyId': 'alias/<alias_name>'}
    s3 = s3fs.S3FileSystem(anon=False, s3_additional_kwargs=ExtraArgs)
    secret_name = "<secret_name>"  # provide secret name from AWS secrets manager
    region_name = "<region_name>"  # provide region name
    ftp_url = "localhost"  # provide FTP host
    ftp_path = "/test_folder/"  # provide FTP path
    s3Bucket = "s3-bucket"  # provide s3 bucket name
    file_name = "sample.txt"  # provide file name
    # Create a Secrets Manager client
    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name
    )
    get_secret_value_response = client.get_secret_value(
        SecretId=secret_name
    )
    # the secret comes back either as a string or base64-encoded binary
    if 'SecretString' in get_secret_value_response:
        secret = get_secret_value_response['SecretString']
    else:
        secret = base64.b64decode(get_secret_value_response['SecretBinary'])
    secret_dict = json.loads(secret)
    ftps = FTP_TLS(ftp_url)
    ftps.login(secret_dict['username'], secret_dict['password'])
    ftps.prot_p()  # secure the data channel
    logger.info('Login Successful')
    ftps.cwd(ftp_path)
    # close/flush the S3 file object -- the original never closed it, so the
    # buffered s3fs upload may never have been finalized
    with s3.open("{}/{}".format(s3Bucket, file_name), 'wb') as s3_obj:
        ftps.retrbinary('RETR ' + file_name, s3_obj.write)
    logger.info('Download completed: ' + file_name)
def ftpDownload(filename, system): from ftplib import FTP_TLS import os ftps = FTP_TLS() ftps.connect('pwcrack.init6.me', '21') ftps.auth() ftps.login('DC214', 'passwordcrackingcontest') ftps.prot_p() ftps.set_pasv(True) local_filename = filename with open(local_filename, 'wb') as f: def callback(data): print "Downloading %s ..." % filename f.write(data) ftps.retrbinary('RETR %s' % filename, callback) f.close() file_extension = str(filename.rsplit('.')[2]) if file_extension == '7z': status = decompressit(local_filename, system) if status: print "file %s has been downloaded." % local_filename
def get_file(file_to_sync):
    """Download `file_to_sync` from the configured FTPS server into the
    current directory; failures are logged, and the connection is always
    closed."""
    connection = FTP_TLS(server)
    logger.info(f"Attempting ftp connection to {server}")
    try:
        connection.login(user=user, passwd=passwd)
        connection.prot_p()
        logger.info(f"ftp connect SUCCESS: {connection.welcome}")
        connection.cwd(remote_dir_path)
        with open(file_to_sync, "wb") as localfile:
            status = connection.retrbinary(f"RETR {file_to_sync}", localfile.write)
            logger.info(f"Attempting to download {file_to_sync}: {status}")
    except Exception as e:
        logger.error(f"ftp connection FAILED: {e}")
    finally:
        connection.quit()
        logger.info("ftp connection closed")
def acquire_stage_ftp(self, directory, package, local_dir, force, stage):
    """download one file from the ftp server"""
    destination = local_dir / package
    # respect an existing download unless forced
    if destination.exists() and not force:
        print(stage + ": not overwriting {file} since not forced to overwrite!".
              format(**{"file": str(destination)}))
        return
    ftp = FTP_TLS(self.remote_host)
    login_reply = ftp.login(user="******", passwd="anonymous", acct="anonymous")
    print(stage + ": " + login_reply)
    print(stage + ": Downloading from " + directory)
    print(stage + ": " + ftp.cwd(directory))
    ftp.set_pasv(True)
    with destination.open(mode="wb") as filedes:
        print(stage + ": downloading from " + directory + " to " + str(destination))
        print(stage + ": " + ftp.retrbinary("RETR " + package, filedes.write))
# NOTE(review): this is a fragment of a larger per-server/per-hour loop --
# `arrTime`, `x04`, `ftp`, `vSourceFolder`, `folderPath`, `noError`,
# `textError`, `headerCsvExist` etc. are defined outside this view, and the
# enclosing while-loop continues (incl. its increment) beyond this excerpt.
useTimeE = str(arrTime[x04][1])
# hour 23 rolls over to the next day, so the end timestamp uses useDateEnd
if useTimeS == '23':
    vSourceFile = useNoHost + "_pmresult_" + usePmResult + "_60_" + useDate + useTimeS + "00_" + useDateEnd + useTimeE + "00.csv.gz"
else:
    vSourceFile = useNoHost + "_pmresult_" + usePmResult + "_60_" + useDate + useTimeS + "00_" + useDate + useTimeE + "00.csv.gz"
vSourceTarget = vSourceFolder + "/" + vSourceFile
#result should be like = "//opt/oss/server/var/fileint/pm/pmexport_20190715/HOST04_pmresult_83888458_60_201907150100_201907150200.csv"
vDestinTarget = folderPath + "\\pmr" + usePmResult + "_Serv" + useNoHost + "_" + useDate + useTimeS + ".csv.gz"
vDestinTargetCsv = folderPath + "\\pmr" + usePmResult + "_Serv" + useNoHost + "_kirimRT.csv"
#result should be like = "E:\ResultFtp\dataCsv04\Pm_1275071217_2019031019_Serv135.csv"
localfile = open(vDestinTarget, 'wb')
try:
    ftp.retrbinary('RETR ' + vSourceFile, localfile.write, 1024)
except:
    # count the failure and append a description for later reporting
    noError = noError + 1
    textError = textError + "\n" + "Server: " + useServer + ", PmResult: " + usePmResult + ", Date: " + useDate + ", Time: " + useTimeS
localfile.close()
# decompress the downloaded .gz and append its rows to the rolling CSV
fIn = gzip.open(vDestinTarget, 'rb')
lines = fIn.readlines()
fileCSV = open(vDestinTargetCsv, "a")
noLine = 0
while noLine < len(lines):
    # write the header only once across all appended files, renaming one column
    if noLine == 0 and headerCsvExist == False:
        headerCsvExist = True
        fileCSV.write(lines[noLine].replace(
            'Successful Operation Rate', 'Operation Success Rate'))
class FtpProxy(object):
    """Wrapper around ftplib FTP/FTP_TLS with logging and a
    name-then-IP connection fallback."""

    def __init__(self):
        # connection settings; populated by Init()
        self.connectionType = ''   # 'ftp' or 'ftps'
        self.connection = None     # underlying FTP / FTP_TLS instance
        self.username = ''
        self.password = ''
        self.serverName = ''
        self.serverIp = ''
        self.isConnected = False
        self.logger = None

    @classmethod
    def Init(cls, ConnectionType, ServerName, ServerIp, UserName, Password,
             Logger):
        """Alternate constructor: build a proxy from explicit settings."""
        ftpProxy = cls()
        ## initializing logger
        ftpProxy.logger = Logger
        ## initializing...
        ftpProxy.connectionType = str(ConnectionType)
        ftpProxy.username = str(UserName)
        if ftpProxy.username == '':
            ftpProxy.username = '******'
        ftpProxy.password = str(Password)
        ftpProxy.serverName = str(ServerName)
        ftpProxy.serverIp = str(ServerIp)
        ftpProxy.isConnected = False
        return ftpProxy

    def connectByServerName(self):
        """Connect using the DNS name; returns True on success, False
        (after logging) on any failure."""
        try:
            if (self.connectionType == 'ftp'):
                self.connection = FTP(self.serverName)
            elif (self.connectionType == 'ftps'):
                self.connection = FTP_TLS(self.serverName)
            else:
                raise Exception('Unknown connection type...')
            self.connection.login(user=self.username, passwd=self.password)
            if (self.connectionType == 'ftps'):
                self.connection.prot_p()  # secure the data channel
            self.isConnected = True
            return self.isConnected
        except Exception as e:
            self.logger.LogMessage('error', str(e))
            return False

    def connectByServerIp(self):
        """Same as connectByServerName() but using the raw IP address."""
        try:
            if (self.connectionType == 'ftp'):
                self.connection = FTP(self.serverIp)
            elif (self.connectionType == 'ftps'):
                self.connection = FTP_TLS(self.serverIp)
            else:
                raise Exception('Unknown connection type...')
            self.connection.login(user=self.username, passwd=self.password)
            if (self.connectionType == 'ftps'):
                self.connection.prot_p()
            self.isConnected = True
            return self.isConnected
        except Exception as e:
            self.logger.LogMessage('error', str(e))
            return False

    def Connect(self):
        """Connect by server name first, falling back to the IP address."""
        if self.connectByServerName():
            return True
        if self.connectByServerIp():
            return True
        return False

    def parseLocationEntry(self, locationPath, entry, entries):
        # Parse one LIST line into an FtpEntry and append it to `entries`;
        # parse failures are logged and the entry is skipped.
        ftpEntry = None
        try:
            ftpEntry = FtpEntry.Init(locationPath, entry)
        except Exception as e:
            # NOTE(review): this message contained a raw line break in the
            # original source (garbled extraction); kept as an escaped \n.
            self.logger.LogMessage(
                'error', 'error parsing entry {0} in {1} \n{2}'.format(
                    entry, locationPath, str(e)))
        if ftpEntry:
            entries.append(ftpEntry)

    def ListLocation(self, LocationPath):
        """LIST `LocationPath` (normalized to a single leading slash) and
        return a list of FtpEntry objects; None when not connected or on
        error."""
        if not self.isConnected:
            return None
        if len(LocationPath) == 0:
            LocationPath = '/'
        if LocationPath[0:1] != '/':
            LocationPath = '/{0}'.format(LocationPath)
        # collapse duplicate slashes
        LocationPath = re.sub('[\/]+', '/', LocationPath)
        try:
            entries = []
            self.connection.retrlines(
                'LIST {0}'.format(LocationPath), lambda entry: self.
                parseLocationEntry(LocationPath, entry, entries))
            return entries
        except Exception as e:
            self.logger.LogMessage(
                'error', 'error listing {0} {1}'.format(LocationPath, str(e)))
            return None

    def RetrieveFile(self, FullFileName):
        """Download `FullFileName` into an in-memory BytesIO stream; returns
        None when not connected or on failure."""
        if not self.isConnected:
            return None
        try:
            fileStream = io.BytesIO()
            self.connection.retrbinary('RETR {0}'.format(FullFileName),
                                       fileStream.write, blocksize=1024)
            return fileStream
        except Exception as e:
            self.logger.LogMessage(
                'info',
                'could not retrieve file {0} {1}'.format(FullFileName, str(e)))
            return None
import sys
import json

# Minimal FTP(S) download script.
#   argv[1]: JSON server spec {"secure": bool, "host", "port", "user", "pass"}
#   argv[2]: remote file name to fetch
#   argv[3]: local destination path
# Exits with status 1 on any failure.
server = json.loads(sys.argv[1])
try:
    if server['secure'] == True:
        from ftplib import FTP_TLS
        ftp = FTP_TLS()
    else:
        from ftplib import FTP
        ftp = FTP()
    ftp.connect(server['host'], server['port'])
    ftp.login(server['user'], server['pass'])
    # `with` guarantees the local file handle is closed even if the
    # transfer aborts midway (the original leaked the handle).
    with open(sys.argv[3], 'wb') as out:
        ftp.retrbinary('RETR %s' % sys.argv[2], out.write)
    ftp.quit()
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still work.
    sys.exit(1)
# Get list of items in current directory directory_list = ftps.nlst() # Get list of images image_list = [item for item in directory_list if '.jpg' in item] # Save oldest & newest images images_to_upload = [] if image_list: # Add first image images_to_upload.append(image_list[0]) if len(image_list) > 1: # Add last image (if more than 1 image) images_to_upload.append(image_list[len(image_list)-1]) # Download oldest & newest image for image in images_to_upload: print 'Downloading %s...' % image ftps.retrbinary('RETR %s' % image, open(image, 'wb').write) # Check if directory for old images exists, if not create it if 'old' not in directory_list: print 'Creating dir "old"...' ftps.mkd('old') # Move checked images to old for image in image_list: #date_str = image.split('_')[1].split('.')[0] #img_date = datetime.strptime(date_str, '%Y%m%d-%H%M%S') print 'Moving %s to "old"...' % image ftps.rename(image, 'old/%s' % image) # Disconnect from FTP server ftps.quit()
# TODO solve the encryption problem #ftps.prot_p() #define default DIR d = 'feeds' #Change to default DIR ftps.cwd(d) #Build list of files on servers l = ftps.nlst() l.sort() for i in l: print(i) #Assign last element to var litem = len(l)-1 print("MOST RECENT FILE ON SERVER IS; ",l[litem]) g = l[litem] #Define local file t = d + '/' + g if os.path.exists(t): print("FILE" ,g," EXISTS,WILL NOT DOWNLOAD FROM HOST:",host) else: print("WILL DOWNLOAD FILE:",g) #Construct 'RETR' string for FTP download function getstring = 'RETR ' + g print(getstring) ftps.retrbinary(getstring,open(t,'wb').write) #Close Session ftps.close() except IOError as err: print("ERROR RETRIEVING FILES ",err)
def get_stock_data():
    """ Get item stock data from csv.

    Downloads (at most once per day) the Broder Bros inventory file over
    FTPS and a pricing tarball over HTTPS, then merges them.

    Returns:
        { itemRef: [ {'options': [option_dict], 'price': [price_dict], 'inventory': int} ] }
        * option_dict = {'option_type': string, 'option_value': string, 'attributes': [attrib_dict]}
        ** attrib_dict = {'attribute_type': string, 'attribute_value': string}
        * price_dict = {'price_type': string, 'price': float, 'quantity_break_start': float, 'quantity_break_end': float}
        ** 'price_type', 'quantity_break_start' and 'quantity_break_end' are optional
        ** 'price', 'quantity_break_start', 'quantity_break_end' can be of any type that supported by decimal.Decimal()

    NOTE(review): Python 2 code. FTP/web credentials are hard-coded below;
    several file handles are never closed and all excepts are bare.
    """
    if not os.path.exists('./catalog/stock_data'):
        os.mkdir('./catalog/stock_data')
    inventory_data = {}
    inventory_file = './catalog/stock_data/inventory-bro.txt'
    download_data = True
    if os.path.exists(inventory_file):
        # Check that inventory file is no more than 1 day old
        filestat = os.stat(inventory_file)
        tm = datetime.datetime.fromtimestamp(filestat.st_mtime)
        today = datetime.datetime.now()
        dt = today - tm
        if dt.days < 1:
            download_data = False
    if download_data:
        # Get inventory data from ftp site
        from ftplib import FTP_TLS
        print 'Downloading inventory-bro.txt ....'
        # NOTE(review): hard-coded credentials.
        ftps = FTP_TLS('ftp.appareldownload.com')
        ftps.login('Br0d3r', 'Br0d3r2oll')
        ftps.prot_p()
        #ftps.retrlines('LIST')
        # NOTE(review): open() handle is never explicitly closed.
        ftps.retrbinary('RETR inventory-bro.txt', open(inventory_file, 'wb').write)
        ftps.quit()
    print "Parse inventory-bro.txt ... "
    first_row = None
    # NOTE(review): this reader's file handle is never closed either.
    for row in csv.reader(open(inventory_file, 'rb')):
        itemRef = row[4].lower()
        if itemRef == 'style number':
            # save first row to be used as column header
            first_row = row
            continue
        source_attribs = [{'attribute_type': 'source', 'attribute_value': 'broderbros'}]
        inventory_data.setdefault(itemRef, [])
        color = row[8].lower()
        size = row[10].lower()
        # Warehouses starts at column 13
        for i in range(13, len(first_row)):
            wh_name = first_row[i]
            options = [
                {'option_type': 'color', 'option_value': color, 'attributes': []},
                {'option_type': 'size', 'option_value': size, 'attributes': []},
                {'option_type': 'warehouse', 'option_value': wh_name, 'attributes': source_attribs, 'shared': True},
                {'option_type': 'vendor', 'option_value': 'broderbros', 'attributes': source_attribs, 'shared': True},
            ]
            inventory_data[itemRef].append({'options': options, 'inventory': row[i]})
    # Pricing data
    pricing_tarfile = "./catalog/stock_data/bro-AllStyles_R06.tar.gz"
    download_data = True
    if os.path.exists(pricing_tarfile):
        # Check that file is no more than 1 day old
        filestat = os.stat(pricing_tarfile)
        tm = datetime.datetime.fromtimestamp(filestat.st_mtime)
        today = datetime.datetime.now()
        dt = today - tm
        if dt.days < 1:
            download_data = False
    if download_data:
        print 'Downloading items.csv for price data ....'
        # Log in to the Broder Bros site with a scripted browser and fetch
        # the pricing tarball.  USERNAME/PASSWORD/TESTRUN and `utils` come
        # from module scope (not visible here).
        br = utils.create_browser(1, 2)
        br.open("https://www.broderbros.com/cgi-bin/online/webbro/bro-index.w")
        try:
            # Fill login form
            br.select_form(name = 'frmLogin')
            frm = br.form
            ctrl = frm.find_control('userName')
            ctrl.value = USERNAME
            ctrl = frm.find_control('password')
            ctrl.value = PASSWORD
            # Submit login form
            if TESTRUN: print 'Submit Login Form'
            br.select_form(name = 'frmLogin')
            br.submit()
        except:
            print "Login form does not exist, please check URL, downloaded html or site is down"
            return None
        try:
            tar_url = "https://www.broderbros.com/cgi-bin/download/webshr/prod-info-view.w?f=bro-AllStyles_R06.tar.gz"
            br.retrieve(tar_url, pricing_tarfile)
        except:
            print "Error when downloading pricing file"
            return None
        try:
            tar = tarfile.open(pricing_tarfile)
            for member in tar.getmembers():
                member.name = member.name.split('/')[-1] # strip directory from filename
            tar.extractall('catalog/stock_data/bro-AllStyles_R06')
            tar.close()
        except:
            print "Error when extracting items.csv"
            return None
    f_object = open('./catalog/stock_data/bro-AllStyles_R06/items_R06.csv', 'rb')
    #~ f_object = open('items_R06.csv', 'rb')
    print "Parse items_R06.csv ... "
    for row in csv.reader(f_object):
        itemRef = row[7].lower()
        if itemRef == 'style code':
            continue
        size = row[8].lower()
        color = row[11].lower()
        price = row[18]
        item_data = inventory_data.get(itemRef)
        if not item_data:
            continue
        # Find data with same size and color
        for var_dict in item_data:
            options = var_dict['options']
            opt_dict = {}
            for opt in options:
                opt_type = opt['option_type']
                opt_value = opt['option_value']
                if opt_type == 'size':
                    opt_dict['size'] = opt_value
                elif opt_type == 'color':
                    opt_dict['color'] = opt_value
            if opt_dict['size'] == size and opt_dict['color'] == color:
                var_dict['price'] = [{'price_type': 'retail_price', 'price': price}]
    f_object.close()
    try:
        # Clean up the extracted tree; best-effort, failures ignored.
        shutil.rmtree("./catalog/stock_data/bro-AllStyles_R06")
        #~ os.remove("./catalog/stock_data/bro-AllStyles_R06.tar.gz")
    except:
        pass
    return inventory_data
class ServerWatcher(Watcher):
    """Watcher implementation for the remote FTP(S) side of the sync.

    Scans the server recursively, mirrors changes into a local directory,
    and processes delete/upload/download queues sequentially so that all
    FTP commands run in one thread.  Python 2 / Qt (Signal/Slot) code.
    """

    downloadProgress = Signal((int, int,))
    uploadProgress = Signal((int, int,))
    # Si added:
    textStatus = Signal((str,))

    fileEvent = Signal((str,))
    fileEventCompleted = Signal()
    loginCompleted = Signal((bool, str,))
    badFilenameFound = Signal((str,))

    LOCATION = 'server'
    TEST_FILE = 'iqbox.test'

    def __init__(self, host, ssl, parent=None):
        """
        Initializes parent class and attributes. Decides whether
        to use `FTP_TLS` or `FTP` based on the `ssl` param.

        :param host: Location of the FTP server
        :param ssl: Tells whether the FTP needs to support TLS or not
        :param parent: Reference to a `QObject` instance a parent
        """
        super(ServerWatcher, self).__init__(parent)
        self.interval = 5000
        self.localdir = ''
        self.deleteQueue = []
        self.downloadQueue = []
        self.uploadQueue = []
        self.warnedNames = []
        self.ftp = None
        self.useSSL = ssl
        self.host = host
        self.preemptiveCheck = False
        self.preemptiveActions = []
        self.testFile = 'iqbox.test'

    @property
    def currentdir(self):
        """Returns the current working directory at the server"""
        return self.ftp.pwd()

    def setLocalDir(self, localdir):
        """
        Sets the local directory used to stored all
        downloaded files. Creates the directory if needed.

        :param localdir: Absolute path to local directory
        """
        self.localdir = localdir
        if not os.path.exists(self.localdir):
            os.makedirs(self.localdir)

    @pause_timer
    @Slot()
    def checkout(self):
        """
        Recursively checks out all files on the server.
        Returns a dictionary of files on the server with their last modified date.

        :param download: Indicates whether or not the files should be downloaded
        """
        # Check `self.deleteQueue`, `self.uploadQueue` and `self.downloadQueue` queues.
        # These tasks are done in queues to make sure all FTP commands
        # are done sequentially, in the same thread.
        self.deleteAll()
        self.uploadAll()
        self.downloadAll()
        # Handy list to keep track of the checkout process.
        # This list contain absolute paths only.
        checked_dirs = list()
        # Sets '/' as initial directory and initializes `downloading_dir`
        self.ftp.cwd('/')
        downloading_dir = self.currentdir
        check_date = dt.utcnow()
        sidirlist = list()
        root_cached = False
        fileC = 0
        # Iterative depth-first walk over the server tree; a directory is
        # finished once all of its subdirectories are in `checked_dirs`.
        while True:
            # Gets the list of sub directories and files inside the
            # current directory `downloading_dir`.
            self.textStatus.emit('Remote scan- Downloading folder list of '+downloading_dir+'...')
            if root_cached and downloading_dir == '/':
                dir_subdirs = saved_root_dirs
                dirfiles = saved_root_files
            else:
                dir_subdirs = self.getDirs(downloading_dir)
                if downloading_dir == '/':
                    saved_root_dirs = dir_subdirs
                # sidirlist.extend(dir_subdirs)
                self.textStatus.emit('Remote scan- Downloading files list of '+downloading_dir+'...')
                dirfiles = self.getFiles(downloading_dir)
                if downloading_dir == '/':
                    saved_root_files = dirfiles
                    root_cached = True
            # Leading '/' in `downloading_dir` breaks the `os.path.join` call
            localdir = os.path.join(self.localdir, downloading_dir[1:])
            if not os.path.exists(localdir):
                # Creates the directory if it doesn't already exists.
                os.makedirs(localdir)
            for file_ in dirfiles:
                # `serverpath` is the absolute path of the file on the server,
                # download it only if it hasn't been already downloaded
                serverpath = os.path.join(downloading_dir, file_)
                serverpath = QDir.fromNativeSeparators(serverpath)
                server_file = File.fromPath(serverpath)
                self.textStatus.emit('Scanning remote file... '+serverpath+'...')
                # How do we know if we should check this server file?
                # We see if the date last checked is the check start time.
                if server_file.last_checked_server != check_date:
                    # Do this process only once per file
                    # Added by Simon
                    # Give feedback on scanning of files.
                    fileC += 1
                    # NOTE(review): `fileC % 1 == 2` can never be true
                    # (x % 1 is always 0), so this status is never emitted.
                    if fileC % 1 == 2:
                        self.textStatus.emit('Scanning remote files for changes, '+str(fileC)+' files scanned.')
                    # STEP: IS THIS THE FIRST TIME WE SAW THE FILE, OR WAS IT ALREADY IN OUR DB?
                    just_added = not server_file.inserver
                    # STEP: IF ITS A NEW FILE, ENSURE WE DONT WANT TO SKIP IT
                    # Example: If it's a temporary file, or a Unix file with a name we don't support.
                    if just_added:
                        filename = os.path.basename(serverpath)
                        if platform.system() == 'Windows':
                            # NOTE(review): `chr` shadows the builtin here.
                            badName = False
                            for chr in ['\\', '/', ':', '?', '"', '<', '>', '|']:
                                if chr in filename:
                                    badName = True
                                    break
                            if badName:
                                if filename not in self.warnedNames:
                                    self.warnedNames.append(filename)
                                    self.badFilenameFound.emit(filename)
                                continue
                    # STEP: ASSUMING THE FILE DID EXIST IN OUR DB, LETS SAVE THE LAST MODIFICATION DATE
                    lastmdate = server_file.servermdate
                    # STEP: SAVE THE MOD DATE TO A VARIABLE
                    # Now we get the last mod time.
                    # We expect this to work fine since this file
                    # was found on the server
                    servermdate = self.lastModified(serverpath)
                    # STEP: SET BOOL SHOWING THAT IT WAS ON THE SERVER, SINCE WE KNOW IT IS.
                    server_file.inserver = True
                    # STEP: SET THE TIME THE FILE WAS LAST CHECKED TO THE SCAN START TIME
                    server_file.last_checked_server = check_date
                    # STEP: SET THE MOD DATE IN THE DATABASE TO THE ONE WE JUST GOT
                    server_file.servermdate = servermdate
                    # STEP: SAVE THIS CHANGE TO THE DATABASE
                    server_file.session.commit()
                    delta = 0
                    if server_file.inlocal:
                        delta = server_file.timeDiff()
                    # Emit the signals after the attributes has been set and committed
                    if just_added is True:
                        self.fileAdded.emit(ServerWatcher.LOCATION, serverpath)
                    elif server_file.servermdate > lastmdate or delta < -Watcher.TOLERANCE:
                        self.fileChanged.emit(ServerWatcher.LOCATION, serverpath, False)
            #END FOR
            self.textStatus.emit('Remote scan- Finding next folder...')
            dir_ready = True
            for dir_ in dir_subdirs:
                # `dirpath` is the absolute path of the subdirectory on the server,
                dirpath = QDir.fromNativeSeparators(os.path.join(downloading_dir, dir_))
                # `downloading_dir` is ready only when all its subdirectory are on the
                # `checked_dirs` list.
                if dirpath not in checked_dirs:
                    # Found one subdirectory that is not on `checked_dirs`,
                    # will process it in the next iteration.
                    downloading_dir = dirpath
                    dir_ready = False
                    break
            if dir_ready is True:
                # All subdirectories of `downloading_dir` are already in `checked_dirs`
                if downloading_dir == '/':
                    # All directories ready and at '/', means checkout is complete
                    # So, exit the main While loop!!
                    break
                else:
                    # Not at '/'. Current directory is ready so is appended to `checked_dirs`
                    # Back one directory to find directories that are not in `checked_dirs`
                    checked_dirs.append(downloading_dir)
                    downloading_dir = os.path.dirname(downloading_dir)
                    self.textStatus.emit('Remote scan- Found Folder...')
        ##### END OF WHILE ################
        ###################################################################
        # Deleted files are the ones whose `last_checked_server` attribute
        # didn't get updated in the recursive run.
        session = Session()
        deleted = session.query(File).filter(File.last_checked_server < check_date).filter(File.inserver == True)
        for file_ in deleted:
            self.fileDeleted.emit(ServerWatcher.LOCATION, file_.path)
        # Wraps up the checkout process, commits to the database.
        session.commit()

    @Slot()
    def onLogin(self, username, passwd):
        # Lazily creates the FTP client (TLS or plain, per `useSSL`),
        # logs in, then runs write-permission and MFMT capability tests.
        # Emits `loginCompleted(ok, msg)` with the outcome.
        ok = True
        msg = ''
        error_msg = 'Login failed.'
        try:
            if not self.ftp:
                self.ftp = FTP_TLS(self.host) if self.useSSL is True else FTP(self.host)
            loginResponse = self.ftp.login(username, passwd)
        except socket.gaierror:
            self.ftp = None
            ok = False
            msg = 'Server address could not be found.'
        except (error_perm, error_reply):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            ok = False
            msg = error_msg
        else:
            # FTP "230" reply code means the login succeeded.
            if '230' in loginResponse:
                ok = True
            else:
                ok = False
                msg = error_msg
        if ok:
            # Logged in. Now let's do compability tests.
            if not self.testPermissions():
                # User doesn't have write permissions, don't bother doing next test.
                ok = False
                msg = 'It seems like you do not have write access to this server.'
            else:
                # Permissions test passed, now let's test MFMT for timestamp modification.
                if not self.testMFMT():
                    ok = False
                    msg = 'This server does not support timestamp modification\n \
need by this application.'
        self.loginCompleted.emit(ok, msg)

    def getFiles(self, path):
        """
        This method simply wraps the `nlst` method with an exception handler,
        and returns an empty list in case an exception is caught.

        :param path: Relative or absolute path on the server
        """
        try:
            nlst = self.ftp.nlst(path)
            dirs = self.getDirs(path)
            # Files are items in nlst that are not in dirs
            files = [item for item in nlst if os.path.basename(item) not in dirs]
            return files
        except:
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    def getDirs(self, path):
        """
        Retrieves a list of the directories inside `path`,
        uses `retrlines` and the LIST command to retrieve the items.

        :param path: Relative or absolute path on the server
        """
        dirs = list()
        def handleLine(line):
            """
            Recieves a line from the LIST command.
            This function is meant to be used as callback for the `retrlines` method.

            :params line: Line from the LIST command
            """
            if line.startswith('d'):
                # Only lines starting with 'd' are directories
                # Parse the directory out of the line; lines look like:
                # 'drwxrwxrwx   1 user     group           0 Jun 15  2012 dirname'
                dirname = line[55:].strip()
                if dirname != '.' and dirname != '..':
                    # Ignoring '.' and '..' entries
                    dirs.append(dirname)
        try:
            self.ftp.retrlines('LIST %s' % path, handleLine)
            return dirs
        except:
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    @upload_test
    def testPermissions(self):
        # For interface purposes. upload_test takes care of everything.
        return True

    @upload_test
    def testMFMT(self):
        # Absurd date to test whether the change really happened.
        # NOTE(review): `time` shadows any imported time module here.
        time = dt.utcfromtimestamp(100000000)
        try:
            self.setLastModified(self.testFile, time)
            otherTime = self.lastModified(self.testFile)
            diff = (time - otherTime).total_seconds()
            if abs(diff) < 2:
                # Let's give it a 2 seconds tolerance.
                mdtm = True
            else:
                mdtm = False
        except (ValueError, error_reply, error_perm):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            mdtm = False
        return mdtm

    @Slot(str)
    def onDelete(self, filename):
        self.deleteQueue.append(filename)

    def deleteNext(self):
        # Pops and deletes a single queued file, FIFO order.
        if len(self.deleteQueue) > 0:
            next = self.deleteQueue.pop(0)
            self.deleteFile(next)

    def deleteAll(self):
        # Drains the whole delete queue.
        for filename in self.deleteQueue:
            self.deleteFile(filename)
        self.deleteQueue = []

    @Slot(str)
    def deleteFile(self, filename):
        """
        Deletes the file `filename` to the server

        :param filename: Absolute or relative path to the file
        """
        try:
            print 'Deleting %s' % filename
            self.ftp.delete(filename)
            return True
        except (error_reply, error_perm):
            print 'Error deleting %s' % filename
            return False
        # NOTE(review): unreachable -- both branches above return first.
        self.fileEventCompleted.emit()

    @Slot(str)
    def onDownload(self, filename):
        self.downloadQueue.append(filename)

    def downloadNext(self):
        # Pops and downloads a single queued file, FIFO order.
        if len(self.downloadQueue) > 0:
            next = self.downloadQueue.pop(0)
            self.downloadFile(next)

    def downloadAll(self):
        # Drains the whole download queue.
        for filename in self.downloadQueue:
            self.downloadFile(filename)
        self.downloadQueue = []

    @Slot(str, str)
    def downloadFile(self, filename, localpath=None):
        """
        Performs a binary download to the file `filename` located on the server.
        `filename` parameter can be either absolute or relative, though it can
        fail for relative paths if the current directory is not appropiate.

        :param filename: Relative or absolute path to the file
        :param localpath: Absolute local path where the file will be saved
        """
        def handleChunk(chunk):
            """
            Receives chuncks of data downloaded from the server.
            This function is meant to be used as callback for the `retrbinary` method.

            :params chunk: Chunk of downloaded bytes to be written into the file
            """
            # Simply writes the received data into the file `self.downloading`
            self.downloading.write(chunk)
            self.download_progress += len(chunk)
            self.downloadProgress.emit(self.download_size, self.download_progress)
        if localpath is None:
            localpath = self.localFromServer(filename)
        localdir = os.path.dirname(localpath)
        if not os.path.exists(localdir):
            # Creates the directory if it doesn't already exists.
            os.makedirs(localdir)
        print 'Downloading: %s to %s' % (filename, localpath)
        try:
            with open(localpath, 'wb') as f:
                # Opens the file at `localname` which will hold the downloaded file.
                # Object attributes regarding download status are updated accordingly.
                self.fileEvent.emit(filename)
                self.downloading = f
                self.download_progress = 0
                self.download_size = int(self.ftp.sendcmd('SIZE %s' % filename).split(' ')[-1])
                self.ftp.retrbinary('RETR %s' % filename, handleChunk)
                print 'Download finished'
                # Let's set the same modified time to that on the server.
                with File.fromPath(filename) as downloadedfile:
                    mdate = LocalWatcher.lastModified(localpath)
                    downloadedfile.localmdate = mdate
                    downloadedfile.servermdate = mdate
                    self.setLastModified(filename, mdate)
            downloaded = True
        except (IOError, OSError):
            downloaded = False
            self.ioError.emit(localpath)
        except (error_reply, error_perm) as ftperr:
            print 'Error downloading %s, %s' % (filename, ftperr)
            downloaded = False
        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()
        return downloaded

    @Slot(str)
    def onUpload(self, filename):
        self.uploadQueue.append(filename)

    def uploadNext(self):
        # Pops and uploads a single queued file, FIFO order.
        if len(self.uploadQueue) > 0:
            next = self.uploadQueue.pop(0)
            self.uploadFile(next)

    def uploadAll(self):
        # Drains the whole upload queue.
        for filename in self.uploadQueue:
            self.uploadFile(filename)
        self.uploadQueue = []

    @Slot(str)
    def uploadFile(self, filename):
        """
        Uploads the file `filename` to the server, creating
        the needed directories.

        :param filename: Absolute or relative path to the file
        """
        def handle(buf):
            """This function is meant to be used as callback for the `storbinary` method."""
            # NOTE(review): assumes every callback carries a full 1024-byte
            # block, so progress can overshoot on the last chunk.
            self.upload_progress += 1024
            self.uploadProgress.emit(self.upload_size, self.upload_progress)
        # Creates the directory where the file will be uploaded to
        self.mkpath(os.path.dirname(filename))
        localpath = self.localFromServer(filename)
        print 'Uploading %s to %s' % (localpath, filename)
        try:
            # Uploads file and updates its modified date in the server
            # to match the date in the local filesystem.
            self.upload_progress = 0
            self.upload_size = os.path.getsize(localpath)
            self.fileEvent.emit(localpath)
            self.ftp.storbinary('STOR %s' % filename, open(localpath, 'rb'), 1024, handle)
            print 'Upload finished'
            with File.fromPath(filename) as uploaded:
                modified = uploaded.localmdate
                uploaded.servermdate = modified
                self.setLastModified(filename, modified)
            uploaded = True
        except (IOError, OSError):
            uploaded = False
            self.ioError.emit(localpath)
        except (error_reply, error_perm, OSError) as err:
            print 'Error uploading %s, %s' % (filename, err)
            uploaded = False
        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()
        return uploaded

    def lastModified(self, filename):
        """
        Uses the MDTM FTP command to find the last modified timestamp
        of the file `filename`.
        Returns a `datetime.datetime` object in UTC representing the file's
        last modified date and time.

        :param filename: Relative or absolute path to the file
        """
        timestamp = self.ftp.sendcmd('MDTM %s' % filename)
        if '213 ' not in timestamp:
            # Second chance was found to be needed in some cases.
            timestamp = self.ftp.sendcmd('MDTM %s' % filename)
        timestamp = timestamp.split(' ')[-1]
        # Some servers append fractional seconds; pick the format accordingly.
        dateformat = '%Y%m%d%H%M%S.%f' if '.' in timestamp else '%Y%m%d%H%M%S'
        try:
            mtime = dt.strptime(timestamp, dateformat)
        except ValueError:
            # Unparsable reply: fall back to "now" rather than failing.
            mtime = dt.utcnow()
        return mtime

    def setLastModified(self, serverpath, newtime):
        """
        Uses the MFMT or MDTM FTP commands to set `newtime` as the modified timestamp of the
        file `serverpath` on the server.

        :param serverpath: Relative or absolute path to the file
        :param newtime: datedatime object holding the required time
        """
        cmds = ['MFMT', 'MDTM']
        for cmd in cmds:
            try:
                self.ftp.sendcmd(
                    '%s %s %s' % (cmd, newtime.strftime('%Y%m%d%H%M%S'), serverpath))
                return
            except (error_perm, error_reply) as e:
                if cmd == cmds[len(cmds) - 1]:
                    # If is the last comand, re-raise the exception, else
                    # keep trying.
                    raise e
                else:
                    continue

    def mkpath(self, path):
        """
        Creates the path `path` on the server by recursively
        created folders, if needed.

        :param path: Absolute path on the server to be created
        """
        try:
            self.ftp.cwd(path)
        except error_perm:
            # `cwd` call failed. Need to create some folders
            make_dir = '/'
            steps = path.split('/')
            for step in steps:
                if len(step) == 0:
                    continue
                make_dir += '%s/' % step
                try:
                    self.ftp.mkd(make_dir)
                except error_perm:
                    # Probably already exists
                    continue
        else:
            # `cwd` call succeed. No need to create
            # any folders
            self.ftp.cwd('/')
            return

    @Slot(str, str)
    def added(self, location, serverpath):
        # Extends the base `added` handler with preemptive-sync logic:
        # decides whether the new file needs an upload or a download.
        super(ServerWatcher, self).added(location, serverpath)
        def actionFromPath(serverpath):
            # Compares server vs local mtimes and returns the FileAction
            # (upload/download) needed to reconcile them, or None.
            f = File()
            fileExistsOnServer = True
            try:
                f.servermdate = self.lastModified(serverpath)
            except error_perm:
                fileExistsOnServer = False
                f.servermdate = 0
            f.localmdate = LocalWatcher.lastModified(self.localFromServer(serverpath))
            diff = f.timeDiff()
            action = None
            if abs(diff) > Watcher.TOLERANCE:
                if not fileExistsOnServer or diff > 0:
                    action = FileAction(serverpath, FileAction.UPLOAD, ServerWatcher.LOCATION)
                else:
                    action = FileAction(serverpath, FileAction.DOWNLOAD, LocalWatcher.LOCATION)
            return action
        if self.preemptiveCheck:
            if location == ServerWatcher.LOCATION:
                localpath = self.localFromServer(serverpath)
                if not os.path.exists(localpath):
                    action = FileAction(serverpath, FileAction.DOWNLOAD, ServerWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)
            elif location == LocalWatcher.LOCATION:
                # SIZE probe tells us whether the file exists server-side.
                try:
                    self.ftp.sendcmd('SIZE %s' % serverpath)
                except (error_reply, error_perm):
                    exists = False
                else:
                    exists = True
                if not exists:
                    action = FileAction(serverpath, FileAction.UPLOAD, LocalWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)

    @Slot(str, str)
    def changed(self, location, serverpath):
        super(ServerWatcher, self).changed(location, serverpath)

    @Slot(str, str)
    def deleted(self, location, serverpath):
        # Marks the file as gone from the server in the local database.
        super(ServerWatcher, self).deleted(location, serverpath)
        with File.fromPath(serverpath) as deleted:
            deleted.inserver = False
from ftplib import FTP_TLS

# Demo script: anonymous FTPS login, directory listing, and a single
# binary download.  Python 2 (`print` statements).
# NOTE(review): host is hard-coded; the `keyfile` argument to FTP_TLS is
# deprecated in modern Python (an SSLContext should be used instead).
ftps = FTP_TLS(host='52.74.191.39', user='******', passwd='', keyfile='lx_sg1.pem', timeout=10)
ftps.login()            # login anonymously before securing control channel
ftps.prot_p()          # switch to secure data connection.. IMPORTANT! Otherwise, only the user and password is encrypted and not all the file data.
ftps.retrlines('LIST')

filename = 'remote_filename.bin'
print 'Opening local file ' + filename
# NOTE(review): `myfile` is never closed after the transfer below.
myfile = open(filename, 'wb')

ftps.retrbinary('RETR %s' % filename, myfile.write)
ftps.close()
def retrieveFtplib(filename, compression = None, usecols=None, dtype=None, toPrint = False, sep="\t"):
    """
    function that connects to the remote FTP server and extracts a pandas dataframe
    the downloaded file must contain a csv file. It can be bz2, gz encoded or not encoded at all
    if it is encoded, the right extension must be present in the name
    -- IN
    filename : the filename with its extension to be downloaded from the remote ftp server (string)
    compression : string that specifies the encoding of the file (string in [None,"gz","bz2"] default: None
    usecols : an array containing the name of the column to extract (string[]) default: None
    dtype : a dictionary containing the name of the columns and the type to cast them ({string:string}) default: None
    toPrint : boolean that settles if the function should print its progress and results (boolean) default: False
    -- OUT
    db : a pandas dataframe containing the remote database (pandas.Dataframe)
    return None when an error occurs

    NOTE(review): Python 2 code; `dtype` is accepted but never used, and all
    the excepts below are bare.
    """
    startTime = time.time()
    if toPrint:
        print "==========================================="
        print "=== Connection to the remote FTP server ==="
        print "==========================================="
        print ""
        print "using ftplib"
        print "loading :",filename
        print ""
    ftp = FTP_TLS()
    # retrieving information about account on ftp server (user, password, host, port)
    (user, password, host, port) = getAccount()
    if user==None:
        print "error : coudn't read the account information"
        return None
    # connecting and logging in
    try:
        ftp.connect(host,port)
        ftp.login(user,password)
    except:
        print "error : unable to connect to the ftp server"
        return None
    # establishing the security protocol
    ftp.prot_p()
    if toPrint:
        print "connected to the FTP server"
    # retrieving the remote file as a binary file into an in-memory buffer
    sio = StringIO.StringIO()
    def handle_binary(more_data):
        sio.write(more_data)
    try:
        ftp.retrbinary("RETR "+filename, callback=handle_binary)
    except:
        print "error : non-existing file :",filename
        return None
    # Go back to the start of the binary file
    sio.seek(0)
    interval = time.time() - startTime
    if toPrint:
        print 'Data downloaded :', interval, 'sec'
    # Unziping the file
    # NOTE(review): an unrecognized non-None `compression` value leaves
    # `results` unbound and the read below would raise NameError.
    if compression!=None:
        if compression=="gz":
            try:
                results = gzip.GzipFile(fileobj=sio)
            except:
                print "error : decompression impossible : not a gzip file"
                return None
            if toPrint:
                interval = time.time() - startTime
                print 'Decompression done :', interval, 'sec'
        elif compression=="bz2":
            results = StringIO.StringIO()
            a = bz2.decompress(sio.read())
            results.write(a)
            results.seek(0)
            # NOTE(review): dead code -- `try: pass` can never raise, so the
            # bz2 error branch below is unreachable; bz2.decompress above
            # runs unprotected instead.
            try:
                pass
            except:
                print "error : decompression impossible : not a bz2 file"
                return None
            if toPrint:
                interval = time.time() - startTime
                print 'Decompression done :', interval, 'sec'
    else:
        results = sio
    # extracting the file into a pandas dataframe
    try:
        db = pd.read_csv(results,sep=sep, usecols = usecols)
    except:
        print "error : the file doesn't not contain a proper Dataframe"
        return None
    sio.close()
    interval = time.time() - startTime
    if toPrint:
        print 'Dataframe created :', interval, 'sec'
    return db
class FTPClient(object):
    """Class FTPClient

    FTP/FTPS client wrapping ftplib's FTP / FTP_TLS. Every operation logs
    through the MasterHead message bus and fires before/after events so
    listeners can override arguments or suppress the default action
    (via ev.will_run_default()). Methods return True/False (or a list /
    None for list_dir) instead of raising; FTP errors are caught and logged.
    """

    # internal state; populated by __init__ and connect()
    _mh = None            # MasterHead instance used for events and logging
    _client = None        # underlying ftplib.FTP or ftplib.FTP_TLS object
    _secured = None       # True -> FTPS (FTP_TLS with prot_p)
    _host = None          # server host, set on connect()
    _port = None          # server port, set on connect()
    _user = None          # username, set on connect()
    _passw = None         # password, set on connect()
    _path = None          # current remote working directory (from pwd())
    _verbose = None       # True -> ftplib debug level 2
    _is_connected = None  # True between successful connect() and disconnect()

    def __init__(self, secured=False, verbose=False):
        """Class constructor

        Called when the object is initialized

        Args:
           secured (bool): secured FTP (FTPS)
           verbose (bool): verbose mode (sets ftplib debug level 2)

        Raises:
           NotImplementedError: when secured mode is requested on Python 2.6
        """

        self._mh = MasterHead.get_head()

        self._secured = secured
        if (not self._secured):
            self._client = FTP()
        else:
            # FTP_TLS is not usable on Python 2.6, hence the version guard
            if (not(version_info[0] == 2 and version_info[1] == 6)):
                self._client = FTP_TLS()
            else:
                raise NotImplementedError(
                    'Secured mode is not supported for Python 2.6')

        self._verbose = verbose
        if (self._verbose):
            self._client.set_debuglevel(2)

    @property
    def client(self):
        """ FTP client property getter """

        return self._client

    @property
    def secured(self):
        """ secured protocol mode property getter """

        return self._secured

    @property
    def host(self):
        """ server host property getter """

        return self._host

    @property
    def port(self):
        """ server port property getter """

        return self._port

    @property
    def user(self):
        """ username property getter """

        return self._user

    @property
    def passw(self):
        """ user password property getter """

        return self._passw

    @property
    def path(self):
        """ remote path property getter """

        return self._path

    @property
    def verbose(self):
        """ verbose mode property getter """

        return self._verbose

    @property
    def is_connected(self):
        """ is_connected property getter """

        return self._is_connected

    def connect(self, host, port=21, user=None, passw=None, path='/', timeout=10):
        """Method connects to server

        Logs in when a user is given, switches to protected mode for FTPS,
        then changes to the requested remote path.

        Args:
           host (str): server host
           port (int): server port, default protocol port
           user (str): username
           passw (str): password
           path (str): server path
           timeout (int): timeout

        Returns:
           bool: result (False on any FTP error)

        Raises:
           event: ftp_before_connect
           event: ftp_after_connect
        """

        try:
            message = '{0}/{1}@{2}:{3}{4} timeout:{5}'.format(
                user, passw, host, port, path, timeout)
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_connecting', message), self._mh.fromhere())

            # listeners may rewrite any of the connection parameters
            ev = event.Event(
                'ftp_before_connect', host, port, user, passw, path, timeout)
            if (self._mh.fire_event(ev) > 0):
                host = ev.argv(0)
                port = ev.argv(1)
                user = ev.argv(2)
                passw = ev.argv(3)
                path = ev.argv(4)
                timeout = ev.argv(5)

            self._host = host
            self._port = port
            self._user = user
            self._passw = passw

            # listeners may suppress the default connect action entirely
            if (ev.will_run_default()):
                self._client.connect(self._host, self._port, timeout=timeout)

                if (self._user != None):
                    self._client.login(self._user, self._passw)

                if (self._secured):
                    # switch data connection to protected (encrypted) mode
                    self._client.prot_p()

                self._is_connected = True

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_connected'), self._mh.fromhere())
            if (path != None):
                self.change_dir(path)

            ev = event.Event('ftp_after_connect')
            self._mh.fire_event(ev)

            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def disconnect(self):
        """Method disconnects from server

        Args:
           none

        Returns:
           bool: result (False when not connected or on FTP error)
        """

        try:

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False
            else:
                self._client.quit()
                self._is_connected = False
                self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                    'htk_ftp_disconnected'), self._mh.fromhere())
                return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def list_dir(self):
        """Method lists remote working directory

        Args:
           none

        Returns:
           list: names
           NOTE(review): returns False when not connected and None on FTP
           error — callers must distinguish three return types
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_list_dir', self._path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            names = self._client.nlst()
            # drop the pseudo entries some servers include in NLST output
            if ('.' in names):
                names.remove('.')
            if ('..' in names):
                names.remove('..')
            return names

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return None

    def change_dir(self, path):
        """Method changes remote working directory

        Args:
           path (str): new remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_change_dir
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_change_dir', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_change_dir', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.cwd(path)
                # cache the server-reported working directory
                self._path = self._client.pwd()

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_cur_dir', self._path), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def download_file(self, remote_path, local_path=None):
        """Method downloads file from server

        Args:
           remote_path (str): remote path
           local_path (str): local path, default ./filename

        Returns:
           bool: result

        Raises:
           event: ftp_before_download_file
           event: ftp_after_download_file
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_downloading_file', remote_path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event(
                'ftp_before_download_file', remote_path, local_path)
            if (self._mh.fire_event(ev) > 0):
                remote_path = ev.argv(0)
                local_path = ev.argv(1)

            # target directory must already exist when explicitly given
            if (local_path != None and not path.exists(local_path)):
                self._mh.demsg('htk_on_error', self._mh._trn.msg(
                    'htk_ftp_unknown_dir', local_path), self._mh.fromhere())
                return False

            filename = remote_path.split('/')[-1]
            lpath = filename if (local_path == None) else path.join(
                local_path, filename)

            if (ev.will_run_default()):
                with open(lpath, 'wb') as f:
                    self._client.retrbinary('RETR ' + remote_path, f.write)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_file_downloaded'), self._mh.fromhere())
            ev = event.Event('ftp_after_download_file')
            self._mh.fire_event(ev)

            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            # remove the partially downloaded local file
            if (path.exists(lpath)):
                remove(lpath)
            return False

    def upload_file(self, local_path, remote_path=None):
        """Method uploads file to server

        Args:
           local_path (str): local path
           remote_path (str): remote path, default ./filename

        Returns:
           bool: result

        Raises:
           event: ftp_before_upload_file
           event: ftp_after_upload_file
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_uploading_file', local_path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_upload_file', local_path, remote_path)
            if (self._mh.fire_event(ev) > 0):
                local_path = ev.argv(0)
                remote_path = ev.argv(1)

            # accept the path either as given or relative to the cwd
            if (not(path.exists(local_path) or path.exists(path.relpath(local_path)))):
                self._mh.demsg('htk_on_error', self._mh._trn.msg(
                    'htk_ftp_unknown_file', local_path), self._mh.fromhere())
                return False

            filename = local_path.split('/')[-1]
            rpath = filename if (remote_path == None) else path.join(
                remote_path, filename)

            if (ev.will_run_default()):
                with open(local_path, 'rb') as f:
                    self._client.storbinary('STOR ' + rpath, f)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_file_uploaded'), self._mh.fromhere())
            ev = event.Event('ftp_after_upload_file')
            self._mh.fire_event(ev)

            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def delete_file(self, path):
        """Method deletes file from server

        Args:
           path (str): remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_delete_file
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_deleting_file', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_delete_file', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.delete(path)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_file_deleted'), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def make_dir(self, path):
        """Method makes directory on server

        Args:
           path (str): remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_make_dir
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_making_dir', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_make_dir', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.mkd(path)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_dir_made'), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False

    def remove_dir(self, path):
        """Method removes directory from server

        Args:
           path (str): remote path

        Returns:
           bool: result

        Raises:
           event: ftp_before_remove_dir
        """

        try:
            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_removing_dir', path), self._mh.fromhere())

            if (not self._is_connected):
                self._mh.demsg('htk_on_warning', self._mh._trn.msg(
                    'htk_ftp_not_connected'), self._mh.fromhere())
                return False

            ev = event.Event('ftp_before_remove_dir', path)
            if (self._mh.fire_event(ev) > 0):
                path = ev.argv(0)

            if (ev.will_run_default()):
                self._client.rmd(path)

            self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
                'htk_ftp_dir_removed'), self._mh.fromhere())
            return True

        except all_errors as ex:
            self._mh.demsg(
                'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
            return False