def __enter__(self):
    """Context-manager entry: reuse a pooled FTP connection when it is
    still alive, otherwise open a fresh one and cache it under self.hash.

    :returns: self
    """
    if self.hash in self.__conn_pool:
        try:
            # Cheap liveness probe on the cached session.
            self.conn._session.voidcmd('NOOP')
        except Exception:  # narrowed from bare except; assume connection timeout
            # Fall through and rebuild the connection below.
            pass
        else:
            # Cached connection answered -- reuse it.
            return self
    session_factory = \
        FtpTlsSession if self._scheme == 'ftps://' else FtpSession
    ftp_host = ftputil.FTPHost(
        self._host, self._port, self._user, self._passwd,
        session_factory=session_factory)
    self.__conn_pool[self.hash] = ftp_host
    return self
def main():
    """CLI entry point: connect anonymously to the NCBI FTP server and
    download the PubTator files that are newer than the local copies.

    Command line: <path> <email>; the email is used as the anonymous
    FTP password per NCBI convention.
    """
    ap = argparse.ArgumentParser(
        description=
        'This script will connect to the FTP server, check if new files are available, and then download the updated files if that is the case. It specifically checks the files in the path indicated to see if those files have been modified since its been uploaded.'
    )
    ap.add_argument(
        "path",
        type=str,
        help=
        "indicate the folder where current pubtator files are or where they wish to be placed"
    )
    ap.add_argument("email",
                    type=EmailType('RFC5322'),
                    help="email must be provided: [email protected]")
    if len(sys.argv) == 1:
        # No arguments at all: show usage and exit non-zero.
        ap.print_help(sys.stderr)
        sys.exit(1)
    args = vars(ap.parse_args())
    pathlocation = args['path']
    email = args['email']
    # connect to FTP server using name (anonymous) and password (email address)
    host = ftputil.FTPHost('ftp.ncbi.nlm.nih.gov', 'anonymous', email)
    # list all files/folders in current directory
    host.listdir('pub')
    #changes directory to pub
    host.chdir('pub')
    host.chdir('lu')
    host.chdir('PubTator')
    files = [
        'disease2pubtator.gz', 'gene2pubtator.gz', 'mutation2pubtator.gz',
        'chemical2pubtator.gz'
    ]
    #for loop where files are being checked/updated
    for i in files:
        # BUG FIX: `pathlocation + i` produced a broken path whenever the
        # user omitted the trailing separator; os.path.join is safe either
        # way.  Also dropped the `== True` comparison.
        if host.download_if_newer(i, os.path.join(pathlocation, i)):
            print("downloading: " + i)
        else:
            print(i + " is up to date")
    # Close the connection (the original leaked it).
    host.close()
def get_experiment_urls(exp):
    """Return FTP paths of every run file under an SRA experiment accession.

    :param exp: SRA experiment accession, e.g. 'SRX123456'; the first six
        characters select the parent bucket directory.
    :returns: list of absolute FTP path strings.
    """
    exp_urls = []
    with ftputil.FTPHost(sra_host, sra_user, sra_password) as ftp_host:
        # ByExp layout: .../SRX/<accession[:6]>/<accession>/<run>/<file>
        exp_dir = '/sra/sra-instant/reads/ByExp/sra/SRX/%s/%s/' \
                  % (exp[:6], exp)
        ftp_host.chdir(exp_dir)
        run_folders = ftp_host.listdir(ftp_host.curdir)
        # compile a list of all files
        # (removed unused local `download_paths` from the original)
        for folder in run_folders:
            files = ftp_host.listdir(folder)
            # Each run folder is expected to hold exactly one file.
            assert len(files) == 1
            for f in files:
                path = exp_dir + folder + '/' + f
                exp_urls.append(path)
    return exp_urls
def backup(database, ftp, mysqldump):
    """Dump a MySQL database, tar it, upload it over FTP, and prune old backups.

    :param database: dj_database_url-style DSN of the database to dump.
    :param ftp: ftp:// URL naming host, credentials and target directory.
    :param mysqldump: path to the mysqldump binary.
    """
    logging.info('Starting backup')
    name = backup_name(database)
    path = '/tmp/' + name
    logging.info('Backup database to {}'.format(path))
    db = dj_database_url.parse(database)
    # WARNING: the password appears in the mysqldump argv (visible in `ps`).
    s.bash(
        '-c', '"' + ' '.join([
            mysqldump, '-u', db['USER'], '-p' + db['PASSWORD'], '-h',
            db['HOST'], '-P', str(db['PORT']), db['NAME']
        ]) + '"').redirect(path, append=False, stdout=True, stderr=False).run()
    s.tar('cf', path + '.tar.gz', path).run()
    s.rm(path).run()
    path = path + '.tar.gz'
    ftp = urlparse.urlparse(ftp, 'ftp')
    logging.info('Sending backup {} to {}'.format(path, ftp.hostname))
    host = ftp.hostname
    if ftp.port:
        # BUG FIX: ftp.port is an int; concatenating it to a str raised
        # TypeError whenever the URL carried an explicit port.
        # NOTE(review): ftputil.FTPHost takes the port as a separate
        # argument; a "host:port" string may not resolve -- confirm.
        host = host + ':' + str(ftp.port)
    # noinspection is due to an error into FTPHost code
    # noinspection PyDeprecation
    with ftputil.FTPHost(host, ftp.username, ftp.password) as host:
        host.makedirs(ftp.path)
        host.upload_if_newer(path, ftp.path + name + '.tar.gz')
        host.chdir(ftp.path)
        # Keep only backups for this database, oldest first.
        files = [
            file for file in host.listdir(ftp.path)
            if file.startswith(backup_front_name(database))
        ]
        files.sort()
        # Prune until at most options.max backups remain.
        while len(files) > options.max:
            old_file = files.pop(0)
            host.remove(ftp.path + old_file)
            logging.info("Deleted old file {}".format(old_file))
    s.rm(path).run()
    logging.info('Ended')
def find_newest_files(folder):
    """Return absolute paths of newest files on server.

    This function will descend into subdirectories of the folder.

    :param folder: The folder on the FTP server where we shall find the
        newest file. We will descend into subdirectories of this folder.
    :type folder: str

    :returns: The path name of the newest file, i.e., the one with the
        most recent modification time.
    """
    # One FileInfo bucket per reporting period, keyed by period name.
    # NOTE(review): assumes FileInfo starts with comparable .mtime and a
    # falsy .path default -- confirm against the FileInfo definition.
    newest_in_period = {
        period["name"]: FileInfo(name=construct_file_name(period))
        for period in PERIODS
    }
    # The canonical per-period output names themselves are never candidates.
    file_names_to_avoid = [construct_file_name(period) for period in PERIODS]
    with ftputil.FTPHost(FTP_ADDRESS, FTP_USERNAME, FTP_PASSWORD) as ftp:
        for dirpath, dirnames, files in ftp.walk(folder):
            # Consider only files of the expected type, excluding the
            # canonical names collected above.
            for f in [
                    fname for fname in files if fname.endswith(FILE_TYPE)
                    and fname not in file_names_to_avoid
            ]:
                fullpath_filename = dirpath + "/" + f
                statinfo = ftp.stat(fullpath_filename)
                mtime = statinfo.st_mtime
                logging.debug("%s modified at %f", fullpath_filename, mtime)
                # A file may fall into several periods; track the newest
                # candidate per period.
                for period in PERIODS:
                    if within_period(mtime, period):
                        nip = newest_in_period[period["name"]]
                        if mtime > nip.mtime:
                            nip.path = fullpath_filename
                            nip.mtime = mtime
    # .itervalues(): this module targets Python 2.
    newest_files = [fi for fi in newest_in_period.itervalues() if fi.path]
    return newest_files
def juurimod():
    """Back up the FTP server root plus the local `juuri` tree into a zip.

    Downloads every plain file in the FTP server's current directory into
    `temppi`, zips them together with everything under `juuri`, appends a
    log to c:/tmep/halko.txt, and returns a (Finnish) status string.
    """
    if os.path.exists(zippi):
        # Start from a fresh archive.
        os.remove(zippi)
    valmis = "Varmuuskopiointi valmis!" + "\n"
    # NOTE(review): 'c:/tmep/...' looks like a typo for 'c:/temp/...' --
    # left unchanged because the directory may genuinely be named 'tmep'.
    f = open('c:/tmep/halko.txt', 'a')
    if not os.path.exists(kohde):  # was `!= True`
        os.mkdir(kohde)
        f.write("Luotiin kansio" + "\n")
        valmis += " Luotiin kansio" + "\n"
    # BUG FIX: the original format was '%H:%m' -- %m is the month; %M is
    # the minutes field intended for a timestamp.
    f.write("\nKopsattu " + strftime('%d.%m.%Y %H:%M', gmtime()) + "\n")
    zf = zipfile.ZipFile(zippi, "a", zipfile.ZIP_DEFLATED)
    # SECURITY: hard-coded credentials in source control; move to config.
    with ftputil.FTPHost('164.215.36.22', 'ohjelmointimm19',
                         'HD5a6s7d8ssaB') as host:
        names = host.listdir(host.curdir)
        print(names)
        for name in names:
            print(name)
            if host.path.isfile(name):
                os.chdir(temppi)
                host.download(name, name)
                zf.write(os.path.join(temppi, name))
                valmis += " Haettiin tiedosto " + name + " palvelimelta" + "\n"
    # Add the local tree, logging each file and its size.
    for root, dirs, files in os.walk(juuri):
        for file in files:
            koko = str(os.path.getsize(os.path.join(root, file))) + " KB"
            content = os.path.join(root, file) + " " + koko
            print(content)
            f.write(os.path.join(root, file) + ' ' + koko + '\n')
            zf.write(os.path.join(root, file))
            valmis += " kopioitiin tiedosto " + file + "\n"
    zf.close()
    f.close()
    return valmis
def openMirbaseReadme(self):
    """Parse the release-history table from the miRBase README.

    Reads pub/mirbase/CURRENT/README over anonymous FTP, collects the
    rows that follow the HISTORY marker, and returns a DataFrame with
    one row per release (columns: Version, Date, Entries).
    """
    with ftputil.FTPHost('mirbase.org', 'anonymous', 'anonymous') as fH:
        readme = fH.open('pub/mirbase/CURRENT/README')
        history = defaultdict(dict)
        in_history = False
        row_idx = 0
        for raw_line in readme.readlines():
            if in_history:
                fields = raw_line.strip().split()
                # A history row is exactly: <version> <date> <entries>,
                # where the date starts with a digit.
                if len(fields) == 3 and fields[1][0].isdigit():
                    history[row_idx]['Version'] = fields[0]
                    history[row_idx]['Date'] = fields[1]
                    history[row_idx]['Entries'] = fields[2]
                    row_idx += 1
            # Marker line itself is never parsed (checked after the
            # in_history branch, matching the original flow).
            if 'HISTORY' in raw_line:
                in_history = True
    return pd.DataFrame(history).transpose()
def _ask_files(ftp_host, ftp_user, ftp_password, ftp_input_folder,
               file_name_regexp):
    """Yield (csv_path, md5_path) pairs from the FTP input folder.

    A pair is produced for every file that matches file_name_regexp, has
    a '.csv' extension, and is accompanied by a same-stem '.md5' file.
    """
    with ftputil.FTPHost(
            ftp_host,
            ftp_user,
            ftp_password,
    ) as host:
        entries = host.listdir(ftp_input_folder)
        for entry in entries:
            # Guard clauses: skip non-matching names and non-CSV files.
            if not re.search(file_name_regexp, entry):
                continue
            stem, extension = os.path.splitext(entry)
            if extension != '.csv':
                continue
            companion = stem + '.md5'
            if companion in entries:
                yield (
                    os.path.join(ftp_input_folder, entry),
                    os.path.join(ftp_input_folder, companion),
                )
def open_connection(self):
    """Open the FTP connection, (re)build the MLSD stat cache, and change
    to the configured base path.

    Any lstat entries from a previous connection's cache are carried over
    into the new cache object.

    :returns: self
    """
    self.ftp = ftputil.FTPHost(host=self.server,
                               port=self.port,
                               user=self.user,
                               password=self.password,
                               session_factory=ftputil_custom.FTPSession)
    if self._stat_cache is None:  # was `== None`; identity test is correct
        self._stat_cache = ftputil_custom._StatMLSD(self.ftp)
    else:
        # Preserve previously cached lstat results across reconnects.
        _cache = self._stat_cache._lstat_cache
        self._stat_cache = ftputil_custom._StatMLSD(self.ftp)
        self._stat_cache._lstat_cache = _cache
    # Install our custom stat implementation on the host object.
    self.ftp._stat = self._stat_cache
    self.ftp.chdir(self.basepath)
    return self
def login(credentials):
    """Decrypt stored credentials and open an FTP connection.

    :param credentials: dict with 'iv', 'data' and 'padd' entries holding
        the encrypted credential blob.
    :returns: (FTPHost, location) on success, (None, None) on any failure.
    """
    iv, data, padding = credentials['iv'], credentials['data'], credentials['padd']
    try:
        data = get_data(iv, data, padding)
    except Exception:  # narrowed from bare except: wrong key => decrypt fails
        print(col.minus, 'key not valid')
        return None, None
    data = json.loads(data)
    name, passwd, server, loc = data['name'], data['passwd'], data['server'], data['loc']
    try:
        ftps = ftputil.FTPHost(server, name, passwd)
    except Exception:  # narrowed from bare except: connection/auth failure
        print(col.minus, 'cannot connect, check also the settings of the server')
        return None, None
    return ftps, loc
def get_file_trunks(host, file_path, user_name, passwd, chunk_size=1024):
    """Stream a remote FTP file, yielding it chunk by chunk.

    :param host: FTP server hostname.
    :param file_path: remote path of the file to download.
    :param user_name: FTP user.
    :param passwd: FTP password (never logged).
    :param chunk_size: bytes per yielded chunk.
    :yields: successive byte chunks of the file.
    """
    # SECURITY FIX: the original logged the password; it is omitted here.
    logging.debug("Begin Download Ftp File %s in %s with %s (size=%s)" %
                  (file_path, host, user_name, chunk_size))
    with ftputil.FTPHost(host, user_name, passwd) as ftp_host:
        file_obj = ftp_host.open(file_path, 'rb')
        try:
            while True:
                chunk = file_obj.read(chunk_size)
                if not chunk:
                    break
                yield chunk
        finally:
            # Always release the data connection.  (Removed the original's
            # no-op `except IOError as err: raise err` and the redundant
            # `file_obj = None` pre-assignment.)
            file_obj.close()
def ftp_upload(local_dir, saf_url):
    """Upload all .mp3/.m3u/.saf files from local_dir to the FTP directory
    derived from saf_url (host looked up in the local host database).
    """
    u = urlparse(saf_url)
    host = u.hostname
    info = host_info(host)
    if info is None:
        abort("Host %s not in database." % host)
    login = info['login']
    password = info['password']
    # remote_dir, no slash, e.g. /var/www/public_html/playlists/grooves
    remote_dir = info['ftp_root'] + u.path
    if remote_dir.endswith('/'):
        remote_dir = remote_dir[:-1]
    if not local_dir.endswith(os.path.sep):
        local_dir += os.path.sep
    # Download some files from the login directory.
    with ftputil.FTPHost(host, login, password) as ftp_host:
        ulocal = unicode(local_dir)
        uremote = unicode(remote_dir)
        try:
            ftp_host.chdir(uremote)
        except ftputil.error.FTPError:  # narrowed from bare except
            # Directory missing: create it and retry.  The original wrapped
            # this in a second try/except that only re-raised -- a no-op,
            # so it was removed; failures still propagate.
            ftp_host.makedirs(uremote)
            ftp_host.chdir(uremote)
        print("remote_dir set to %r" % uremote)
        for name in os.listdir(ulocal):
            print("testing %r" % name)
            ext = os.path.splitext(name)[1].lower()
            if ext in ['.mp3', '.m3u', '.saf']:
                upath = os.path.join(ulocal, name)
                if os.path.isfile(upath):
                    # remote name, local name, binary mode
                    try:
                        ftp_host.upload(upath, name)
                        print("uploaded %r to %r" % (upath, name))
                    except ftputil.error.FTPError:  # narrowed from bare except
                        print("failed to upload %r: %s" %
                              (name, sys.exc_info()[0]))
def synchronize(clean=False):
    """Open the configured FTP connection and run the synchronizer.

    Exits with an error message when no FTP host has been configured;
    optional username/password/path settings default to empty.
    """
    try:
        server = config.ftp.host
    except AttributeError:
        sys.exit('cannot synchronize, configure the FTP server access first')
    user = getattr(config.ftp, 'username', '')
    secret = getattr(config.ftp, 'password', '')
    connection = ftputil.FTPHost(server,
                                 user,
                                 secret,
                                 port=int(config.ftp.port),
                                 passive=int(config.ftp.passive),
                                 session_factory=FTPSession)
    remote_path = getattr(config.ftp, 'path', '')
    if remote_path:
        connection.chdir(remote_path)
    Synchronizer(connection).synchronize(
        clean=clean,
        clean_error='%s has no local copy, use --force to remove')
def login(login_details):
    """
    Log in client to server.

    :param login_details: a dictionary of login inputs.
    :return:
    """
    # Server listens on a fixed non-standard port.
    port = 2020
    server = login_details["host"]
    account = login_details["username"]
    secret = login_details["password"]
    return ftputil.FTPHost(server,
                           account,
                           secret,
                           port=port,
                           session_factory=mySession)
def obs2opendap(upload_files, basin, data_usage, var_name, time_freq, database): """ Upload thiessen .asc files from database's local directory to databases's opendap directory :param upload_files: name of the files to be uploaded :type upload_files: str :param basin: name of the basin or sub-basin :type basin: str :param data_usage: Data usage, must be calibration or operation :type data_usage: str :param var_name: name of the basin or sub-basin :type var_name: str :param time_freq: Time frequency, must be 'daily' or 'monthly' :type time_freq: str :param database: Database used. Must be "inmet", "inmet_ana", "chirps". :type database: str """ url_musf = "opendap4.funceme.br" ftp_user = "******" ftp_pass = "******" file_name = os.path.basename(upload_files) dir_target = '/io/{0}/{1}/{2}/{3}_thiessen/{4}/'\ .format(database, data_usage, time_freq, var_name, basin_dict(basin)[1]) ftp = ftputil.FTPHost(url_musf, ftp_user, ftp_pass) if ftp.path.isdir(dir_target): ftp.upload(upload_files, dir_target + file_name) print "" print ("Uploading {0} ...".format(file_name)) else: ftp.makedirs(dir_target) ftp.upload(upload_files, dir_target + file_name) print "" print ("Uploading {0} ...".format(file_name)) ftp.close()
def download(model, rev, fw_ver, fw_url, fdate):
    """Download one firmware image over anonymous FTP into `localstor` and
    append its metadata (size, date, sha1, md5) to uk_dlink_filelist.csv.

    Skips the transfer when the local copy already has the remote size.
    Any exception is caught and printed (best-effort batch behaviour).
    """
    from web_utils import getFileSha1, getFileMd5
    import ftputil
    try:
        fname = fw_url.split('/')[-1]

        def epilog(fsize, fdate):
            # Record one CSV row for this firmware; hashes are None when
            # no local file exists (e.g. remote file missing).
            if not os.path.isfile(localstor + fname):
                sha1 = None
                md5 = None
            else:
                sha1 = getFileSha1(localstor + fname)
                md5 = getFileMd5(localstor + fname)
            with open('uk_dlink_filelist.csv', 'a') as fout:
                cw = csv.writer(fout)
                cw.writerow(
                    [model, rev, fw_ver, fw_url, fsize, fdate, sha1, md5])
            return

        from urllib import parse
        fw_path = parse.urlsplit(fw_url).path
        netloc = parse.urlsplit(fw_url).netloc
        with ftputil.FTPHost(netloc, 'anonymous', '') as host:
            if not host.path.isfile(fw_path):
                print('"ftp://%s/%s" does not exist.' % (netloc, fw_path))
                # -1 size marks a missing remote file in the CSV.
                epilog(-1, None)
                return
            fsize = host.path.getsize(fw_path)
            if fdate is None:
                # Fall back to the remote mtime when no date was supplied.
                fdate = host.path.getmtime(fw_path)
            if os.path.isfile(localstor + fname) and os.path.getsize(
                    localstor + fname) == fsize:
                # Same size as remote => assume already downloaded.
                print('%(fname)s already exists' % locals())
                epilog(fsize, fdate)
                return
            print('Start downloading %(fw_url)s' % locals())
            host.download(fw_path, localstor + fname)
            print('Finised downloading %(fw_url)s' % locals())
            epilog(fsize, fdate)
            return
    except Exception as ex:
        # Best-effort: report and continue with the next firmware.
        print(ex)
def create_connection(self, *args_to_use, **kwargs_to_use):
    """create a connection to the FTP server using the saved arguments"""
    # Select plain FTP or FTP-over-TLS according to the saved flag.
    use_tls = kwargs_to_use["encrypt_data_channel"]
    base_class = ftplib.FTP_TLS if use_tls else ftplib.FTP
    factory = ftputil.session.session_factory(
        base_class=base_class,
        port=kwargs_to_use["port"],
        encrypt_data_channel=use_tls,
        debug_level=None,
    )
    return ftputil.FTPHost(
        kwargs_to_use["host"],
        kwargs_to_use["username"],
        kwargs_to_use["password"],
        session_factory=factory,
    )
def main():
    # Demo: walk part of the FreeBSD FTP mirror, printing each directory's
    # contents, then report the size of ftputil's stat cache.
    test_dir = "pub/FreeBSD/doc"
    ftp_host = ftputil.FTPHost("ftp.de.freebsd.org", 'anonymous',
                               "*****@*****.**")

    def onerror(err):
        # Surface walk errors instead of silently skipping them.
        print err

    for top, dirs, nondirs in ftp_host.walk(test_dir, onerror=onerror):
        print top
        print "  ", dirs
        print "  ", nondirs
        print
        # Stop early once this known-deep directory is reached.
        if top == "pub/FreeBSD/doc/fr_FR.ISO8859-1/books/ppp-primer":
            break
    print "Stat cache:"
    #print ftp_host.stat_cache
    print len(ftp_host.stat_cache), "entries in cache"
    ftp_host.close()
def ftp_cdr_downloads():
    # Download SMSC CDR files modified in the last 24 hours from the
    # configured FTP node into ftp_dest, skipping files already current.
    # (Python 2 module: `print` statements and `except X,e` syntax.)
    try:
        with ftputil.FTPHost(ftp_host, ftp_usr, ftp_pwd) as node_source:
            print unicode(datetime.datetime.now())+">Download Started for "+ftp_node+"<"
            node_source.chdir(ftp_src)
            os.chdir(ftp_dest)
            file_list = node_source.listdir(node_source.curdir)
            # Cut-off: only files touched within the last 24 hours.
            past = time.time() - 24*60*60
            for file_name in file_list:
                if (node_source.path.isfile(file_name)) and ('smsc' in file_name.lower()) and (node_source.path.getmtime(file_name)>=past):
                    # download_if_newer returns truthy only when a transfer
                    # actually happened.
                    if node_source.download_if_newer(file_name,file_name):
                        print unicode(datetime.datetime.now())+">Downloading :"+file_name
            print unicode(datetime.datetime.now())+">Download Complete for "+ftp_node+"<"
    except Exception,e:
        # Best-effort batch job: report and return.
        print "ERROR: ", str(e),"EOE"
def ftp_bar(src, home):
    # Download `src` from the FTP server into `home`, rendering a progress
    # bar driven by the growth of the local file while a background thread
    # performs the blocking transfer.
    ftp_host = ftputil.FTPHost(host, user, password)
    ftp_host.chdir(ftp_path)
    logging.info('Копирование %s to %s', src, home)
    size = ftp_host.path.getsize(src)
    os.chdir(home)
    # Run the blocking download in a daemon thread so this thread can poll
    # the local file size.
    t = threading.Thread(target=ftp_host.download, args=(
        src,
        src,
    ))
    t.setDaemon(True)
    t.start()
    tm = 0
    # Wait up to ~2 s (10 * 0.2 s) for the local file to appear before
    # starting the progress bar.
    while True:
        if os.path.isfile(os.path.join(home, src)):
            break
        tm += 1
        time.sleep(0.2)
        if tm == 10:
            break
    widgets = [
        src + ' ',
        Percentage(), ' ',
        Bar(marker=RotatingMarker()), ' ',
        ETA(), ' ',
        FileTransferSpeed()
    ]
    pbar = ProgressBar(widgets=widgets, maxval=size).start()
    w = 0
    while True:
        try:
            cur_size = os.path.getsize(os.path.join(home, src))
        except FileNotFoundError:
            # File not created yet (or vanished): retry up to ~2 s.
            w += 1
            time.sleep(0.2)
            if w == 10:
                break
            continue
        pbar.update(cur_size)
        # Done once the local size matches the remote size.
        if cur_size == size:
            break
    pbar.finish()
def test_connection(request): user = request.POST['user'] host = request.POST['host'] password = request.POST['password'] deployment_type = request.POST['type'] ftp_home_dir = request.POST['ftp_home_dir'] success = True if deployment_type == Deployment.SSH_SCRIPT: print "SSH" try: with settings(host_string=host, user=user, password=password, abort_on_prompts=True): try: res = run('echo "test"') except: success = False except: success = False elif deployment_type == Deployment.FTP_SYNC: try: host = ftputil.FTPHost(host, user, password) host.chdir(ftp_home_dir) except: success = False resp = "1" if not success: resp = "0" return HttpResponse(resp)
def get_project_urls(project):
    """Get the URLs for all runs from a given project.

    :param project: SRA study accession, e.g. 'SRP123456'; the first six
        characters select the parent bucket directory.
    :returns: list of absolute FTP path strings, one per run file.
    """
    urls = []
    with ftputil.FTPHost(sra_host, sra_user, sra_password) as ftp_host:
        # ByStudy layout: .../SRP/<accession[:6]>/<accession>/<run>/<file>
        exp_dir = '/sra/sra-instant/reads/ByStudy/sra/SRP/%s/%s/' \
                  % (project[:6], project)
        ftp_host.chdir(exp_dir)
        run_folders = ftp_host.listdir(ftp_host.curdir)
        # compile a list of all files
        # (removed unused local `download_paths` from the original)
        for folder in run_folders:
            files = ftp_host.listdir(folder)
            # Each run folder is expected to hold exactly one file.
            assert len(files) == 1
            for f in files:
                path = exp_dir + folder + '/' + f
                urls.append(path)
    return urls
def ftp_connect(host, user, pwd): """ Connects to a FTP server. Returns a FTPutil connection instance. Keyword Arguments: host -- <String> Hostname of the FTP server user -- <String> FTP Username pwd -- <String> FTP Password """ # Connect to FTP (try-catch) try: con = ftputil.FTPHost(host, user, pwd) except ftputil.error.FTPError as e: print "[ERROR]: Can't connect to the FTP server" print e quit() else: print 'Successfully connected to the FTP server.' return con
def go_to_ftp():
    """Fetch pending command markers and CSV files from the FTP drop folder.

    Items containing "get_" are recorded as commands and removed; ".csv"
    items are downloaded locally and removed.  Errors are logged and an
    (possibly partial) result is still returned.

    :returns: {"files": [...], "commands": [...]}
    """
    command_list = []
    file_list = []
    # BUG FIX: the result dict was created inside `try` in the original,
    # so any failure before that point made the final `return` raise
    # NameError instead of returning an empty result.
    objects_to_process = {"files": file_list, "commands": command_list}
    try:
        print_log("go to ftp")
        # NOTE(review): the password argument is get_conf("ftp_user") --
        # the same key as the user; confirm whether "ftp_password" (or
        # similar) was intended.
        with ftputil.FTPHost(get_conf("ftp_address"), get_conf("ftp_user"),
                             get_conf("ftp_user")) as host:
            host.chdir(get_conf("ftp_dir"))
            for item in host.listdir(host.curdir):
                if "get_" in item:
                    command_list.append(item)
                    host.remove(item)
                if ".csv" in item:
                    file_list.append(item)
                    host.download(item, item)
                    host.remove(item)
    except Exception as err:
        print_log("ftp error: " + str(err))
    return objects_to_process
def ftp_upload_dir(host, username, password, local_dir, remote_dir):
    """Recursively upload local_dir to remote_dir over active-mode FTP.

    The local directory layout is mirrored on the server; missing remote
    directories (including parents) are created on demand.
    """
    sf = ftputil.session.session_factory(
        base_class=ftplib.FTP,
        port=21,
        use_passive_mode=False
    )
    if local_dir.endswith(os.sep):
        local_dir = local_dir[:-1]
    with ftputil.FTPHost(host, username, password, session_factory=sf) as ftp:
        for base, dirs, files in os.walk(local_dir):
            # BUG FIX: the original used str.replace, which substitutes
            # local_dir ANYWHERE in the path (e.g. a subfolder with the
            # same name); only the leading prefix must be rewritten.
            remote_base = remote_dir + base[len(local_dir):]
            if not ftp.path.exists(remote_base):
                # makedirs also creates missing parents of remote_dir
                # itself, where the original mkdir would fail.
                ftp.makedirs(remote_base)
            for f in files:
                local_f = os.path.join(base, f)
                remote_f = ftp.path.join(remote_base, f)
                ftp.upload(local_f, remote_f)
def __init__(self, ip, username='******', password=''):
    # Connect to a game server over FTP, read server.properties from the
    # FTP root, and record the world-folder name from its "level-name"
    # entry.  NOTE(review): the '******' default looks like a credential
    # redacted in the original source -- confirm the intended value.
    try:
        os.mkdir("ftp")
    except OSError:
        # Local staging directory already exists -- fine.
        pass
    try:
        self._host = ftputil.FTPHost(ip, username, password)
    except PermanentError:
        raise InvalidCreditdentialsException(
            "Incorrect username or password")
        # NOTE(review): unreachable -- `raise` above always exits.
        return
    self._worldname = None
    if 'server.properties' in self._host.listdir(self._host.curdir):
        self._host.download('server.properties',
                            os.path.join('ftp', 'server.properties'))
        with open(os.path.join('ftp', 'server.properties'), 'r') as props:
            content = props.readlines()
            if len(content) > 1:
                # Normal case: one property per line, '\n'-terminated.
                for prop in content:
                    if prop.startswith("level-name"):
                        self._worldname = str(
                            prop.split("=")[1:][0]).rstrip("\n")
            else:
                # Single-line file: properties separated by bare '\r'.
                for prop in content[0].split('\r'):
                    if prop.startswith("level-name"):
                        self._worldname = str(
                            prop.split("=")[1:][0]).rstrip("\r")
    else:
        raise CouldNotFindPropertiesException(
            "Could not find the server.properties file! The FTP client will not be able to download the world unless the server.properties file is in the default FTP directory"
        )
    if self._worldname in self._host.listdir(self._host.curdir):
        try:
            os.mkdir(os.path.join('ftp', self._worldname))
        except OSError:
            # Staging sub-directory already exists -- fine.
            pass
    else:
        raise CouldNotFindWorldFolderException(
            "Could not find the world folder from the server.properties file"
        )
def __init__(self, cfg_ftp_host, cfg_ftp_usr, cfg_ftp_pwd, cfg_ftp_dir,
             cfg_s3_key=None, cfg_s3_secret=None, bucket_name=None,
             local_dir='./data/', logger=None):
    """Open the FTP source and, when full S3 credentials are supplied,
    ensure the target bucket exists (creating it when missing).

    :param cfg_ftp_host: FTP server hostname
    :param cfg_ftp_usr: FTP user name
    :param cfg_ftp_pwd: FTP password
    :param cfg_ftp_dir: remote FTP directory of interest
    :param cfg_s3_key: optional AWS access key id
    :param cfg_s3_secret: optional AWS secret access key
    :param bucket_name: optional S3 bucket; S3 is configured only when
        key, secret AND bucket are all provided
    :param local_dir: local staging directory (created if absent)
    :param logger: optional logger exposing send_log(message, level)
    """
    self.cfg_ftp_host = cfg_ftp_host
    self.cfg_ftp_usr = cfg_ftp_usr
    self.cfg_ftp_pwd = cfg_ftp_pwd
    self.cfg_ftp_dir = cfg_ftp_dir
    self.ftp_host = ftputil.FTPHost(cfg_ftp_host, cfg_ftp_usr, cfg_ftp_pwd)
    # Some servers mishandle `LIST -a`; disable that option.
    self.ftp_host.use_list_a_option = False
    self.s3_client = None
    self.bucket_name = None
    if cfg_s3_key and cfg_s3_secret and bucket_name:
        self.s3_client = boto3.client('s3',
                                      aws_access_key_id=cfg_s3_key,
                                      aws_secret_access_key=cfg_s3_secret)
        exists = True
        try:
            self.s3_client.head_bucket(Bucket=bucket_name)
        except botocore.exceptions.ClientError as e:
            # If a client error is thrown, then check that it was a 404 error.
            # If it was a 404 error, then the bucket does not exist.
            error_code = int(e.response['Error']['Code'])
            if error_code == 404:
                exists = False
        if not exists:
            if logger:
                logger.send_log('Bucket does not exist. Creating bucket',
                                'ok')
            self.s3_client.create_bucket(Bucket=bucket_name)
        self.bucket_name = bucket_name
    self.local_dir = local_dir
    if not os.path.exists(local_dir):
        os.makedirs(local_dir)
    self.logger = logger
def connect():
    """
    Attempts to establish a connection with the details specified by the
    user.

    Prompts in a loop until a connection succeeds or the user quits with
    'q'; results are communicated through the module-level globals
    `ftp`, `message` and `connection`.
    """
    # NOTE(review): the "******" literals below look like values redacted
    # in the original source (the prompts suggest 'anonymous') -- confirm.
    global ftp
    global message
    global connection
    while True:
        clear_output()
        user_message()
        print("Connect to an FTP Server\nEnter 'q' to quit.")
        server = input(
            "Please enter the address of the FTP server you'd like to connect to: "
        )
        if server.lower() == "q":
            break
        print(
            "Note: You can leave the following two inputs blank to login as 'anonymous'."
        )
        user = input("Please enter in the user name: ")
        if user.lower() == "q":
            break
        elif user.strip() == "":
            user = "******"
        password = getpass.getpass(
            "Please enter in the password of the FTP server: ")
        if password.lower() == "q":
            break
        elif password.strip() == "":
            password = "******"
        try:
            ftp = ftputil.FTPHost(server, user, password)
        except:
            # Broad catch keeps the prompt loop alive on any failure.
            message = "Please ensure that your details are correct."
        else:
            message = f"Connection to {server} was successful!\n"
            connection = server
            break
def upload_to_gnps(input_filename, folder_for_spectra, group_name, username,
                   password):
    """Upload a spectrum file to the GNPS FTP server under
    <folder_for_spectra>/<group_name>/, creating both folders on demand.

    :param input_filename: local path of the file to upload.
    :param folder_for_spectra: top-level remote folder name.
    :param group_name: remote sub-folder name.
    :param username: GNPS FTP user.
    :param password: GNPS FTP password.
    """
    url = "ccms-ftp01.ucsd.edu"
    with ftputil.FTPHost(url, username, password) as ftp_host:
        names = ftp_host.listdir(ftp_host.curdir)
        try:
            if not (folder_for_spectra in names):
                ftp_host.mkdir(folder_for_spectra)
        except Exception:  # narrowed from bare except; best-effort mkdir
            print("Cannot Make Folder", folder_for_spectra)
        ftp_host.chdir(folder_for_spectra)
        try:
            if not (group_name in ftp_host.listdir(ftp_host.curdir)):
                ftp_host.mkdir(group_name)
        except Exception:  # narrowed from bare except; best-effort mkdir
            print("Cannot Make Folder", group_name)
        ftp_host.chdir(group_name)
        ftp_host.upload(input_filename, os.path.basename(input_filename))
def get_all_files_in_dataset_folder_ftp(dataset_accession, folder_prefix,
                                        includefilemetadata=False,
                                        massive_host=None):
    """Walk a MassIVE dataset folder over FTP and list every file in it.

    :param dataset_accession: MassIVE dataset accession (top-level folder).
    :param folder_prefix: sub-folder within the dataset to walk.
    :param includefilemetadata: when True, return dicts with "path" and
        "timestamp" (lstat mtime) instead of bare path strings.
    :param massive_host: optional existing FTPHost; an anonymous connection
        to massive.ucsd.edu is opened when omitted.
    :returns: list of paths, or list of {"path", "timestamp"} dicts.
    """
    import ftputil
    if massive_host is None:  # was `== None`; identity test is correct
        massive_host = ftputil.FTPHost("massive.ucsd.edu", "anonymous", "")
    # NOTE(review): os.path.join uses the local OS separator; on Windows
    # this would build backslash paths for the FTP server -- confirm this
    # code only runs on POSIX hosts.
    directory = os.path.join(dataset_accession, folder_prefix)
    all_files = []
    for root, dirs, files in massive_host.walk(directory,
                                               topdown=True,
                                               onerror=None):
        for filename in files:
            file_full_path = os.path.join(root, filename)
            if includefilemetadata:
                file_stats = massive_host.lstat(file_full_path)
                all_files.append({
                    "path": file_full_path,
                    "timestamp": int(file_stats.st_mtime)
                })
            else:
                all_files.append(file_full_path)
    return all_files