def Download_SSEBop_from_WA_FTP(local_filename, Filename_dir):
    """
    Retrieve monthly SSEBop data from the ftp.wateraccounting.unesco-ihe.org server.

    Restrictions: the data and this python file may not be distributed to others
    without permission of the WA+ team due to data restriction of the SSEBop
    developers.

    Keyword arguments:
    local_filename -- name of the temporary file which contains global SSEBop data
    Filename_dir -- name of the end file with the monthly SSEBop data
    """
    # Collect account and FTP information (credentials come from the project's
    # WebAccounts store; 'FTP_WA' selects the WaterAccounting FTP account).
    username, password = WebAccounts.Accounts(Type='FTP_WA')
    ftpserver = "ftp.wateraccounting.unesco-ihe.org"

    # BUG FIX: the original leaked both the FTP connection and the local file
    # handle on any transfer error; try/finally and the with-block guarantee
    # cleanup on every path.
    ftp = FTP(ftpserver)
    try:
        ftp.login(username, password)
        directory = "/WaterAccounting/Data_Satellite/Evaporation/SSEBop/sourcefiles/"
        ftp.cwd(directory)
        with open(local_filename, "wb") as lf:
            ftp.retrbinary("RETR " + Filename_dir, lf.write)
    finally:
        ftp.close()
    return
def download(dbname, dt, bbox=None):
    """Downloads SMAP soil moisture data for a date *dt* and imports it into
    the PostGIS database *dbname*.  Optionally uses a bounding box to limit
    the region with [minlon, minlat, maxlon, maxlat]."""
    res = 0.36
    # BUG FIX: ftplib.FTP() expects a bare hostname; the original passed the
    # full "ftp://..." URL, which fails name resolution at connect time.
    ftp = FTP("n5eil01u.ecs.nsidc.org")
    ftp.login()
    ftp.cwd("SAN/SMAP/SPL3SMP.002")
    days = ftp.nlst()
    datadir = dt.strftime("%Y.%m.%d")
    if datadir in days:
        outpath = tempfile.mkdtemp()
        ftp.cwd(datadir)
        # first HDF5 granule in the daily directory
        fname = [f for f in ftp.nlst() if f.find("h5") > 0][0]
        with open("{0}/{1}".format(outpath, fname), "wb") as f:
            ftp.retrbinary("RETR {0}".format(fname), f.write)
        f = h5py.File("{0}/{1}".format(outpath, fname))
        lat = f["Soil_Moisture_Retrieval_Data"]["latitude"][:, 0]
        lon = f["Soil_Moisture_Retrieval_Data"]["longitude"][0, :]
        if bbox is not None:
            i = np.where(np.logical_and(lat > bbox[1], lat < bbox[3]))[0]
            j = np.where(np.logical_and(lon > bbox[0], lon < bbox[2]))[0]
            lat = lat[i]
            lon = lon[j]
        else:
            i = range(len(lat))
            j = range(len(lon))
        sm = f["Soil_Moisture_Retrieval_Data"]["soil_moisture"][i[0]:i[-1] + 1, j[0]:j[-1] + 1]
        # FIXME: Use spatially variable observation error
        # sme = f['Soil_Moisture_Retrieval_Data']['soil_moisture_error'][i[0]:i[-1]+1, j[0]:j[-1]+1]
        filename = dbio.writeGeotif(lat, lon, res, sm)
        dbio.ingest(dbname, filename, dt, table, False)
    # BUG FIX: the connection was never closed in the original.
    ftp.quit()
def ftp_download():
    """Download every non-DNA *.fasta.gz reference-proteome file for the
    module-level *species* from the UniProt FTP site into *store*."""
    ftp_host = 'ftp.uniprot.org'
    ftp_user = '******'
    ftp_pass = ''
    ftp_path = '/pub/databases/uniprot/current_release/knowledgebase/reference_proteomes'
    ftp = FTP(ftp_host)
    ftp.login(ftp_user, ftp_pass)
    ftp.getwelcome()
    ftp.cwd(ftp_path)
    dirs = ftp.nlst()
    # print(dirs)
    p = 0  # count of proteome files downloaded
    # Navigate to the required directory and thereby download data.
    for entry in dirs:
        if re.search(species, entry):
            path = ftp_path + '/' + str(species)
            # print(path)
            ftp.cwd(path)
            types = ftp.nlst()
            for x in types:
                # keep protein fasta archives, skip the DNA ones
                if not re.search('DNA.fasta.gz', x) and re.search('fasta.gz', x):
                    final = path + '/' + str(x)
                    # print(final)
                    # BUG FIX: os.path.join was called with a single
                    # pre-concatenated argument ("store + x"), so no path
                    # separator was ever inserted between directory and file.
                    fullfilename = os.path.join(store, str(x))
                    urllib.urlretrieve('ftp://' + ftp_host + str(final), fullfilename)
                    p += 1
    print("Number of viruses: " + str(p))
    print(ftp.pwd())
def fetch(self, is_dl_forced=False):
    """Probe the WormBase FTP "current-development-release" symlink to learn
    the WSnnn release number, update the file dict accordingly, then fetch
    all files."""
    release_dir = 'pub/wormbase/releases/current-development-release'
    ftp = FTP('ftp.wormbase.org')
    ftp.login()
    ftp.cwd(release_dir)
    # The current release dir is a redirect to a versioned release;
    # pull the WSnnn token out of the resolved working directory.
    pwd = ftp.pwd()
    ftp.quit()
    wsver = re.search(r'releases\/(WS\d+)', pwd)
    if wsver is not None and len(wsver.groups()) >= 1:
        self.update_wsnum_in_files(wsver.group(1))
        self.dataset.set_version_by_num(self.version_num)
    else:
        logger.error(
            "Couldn't figure out version number from FTP site. Exiting.")
        exit(1)
    # fetch all the files
    self.get_files(is_dl_forced)
    return
def Get_conn(fileversion): try: dirname=os.path.dirname(fileversion) version=os.path.basename(fileversion) newversion = version + '*' filelistall=os.listdir("/data/upload/") filelist=[] for filename in filelistall: if not os.path.isfile(filename): if re.match(version,filename): filelist.append(filename) if len(filelist) > 0: return "2",filelist else: ftp=FTP() #Define Ftp server infomations to connect ftp.connect('183.61.86.129','2101') ftp.login('hxbns_yanfa','zQkn]/62taBpBJKK|F') ftp.cwd(dirname) filelist = ftp.nlst(newversion) if len(filelist) == 0: return "1",filelist else: return "0",filelist except ftplib.error_perm,e: print "\033[31mFile version does not exist!!\033[0m" return "1",Localpath sys.exit(1)
def urls(base='.'):
    """
    Check that the source, data and license URLs work.
    """
    headers = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)'}
    for slug, config in registry(base).items():
        for key in ('source_url', 'licence_url', 'data_url'):
            if not config.get(key):
                continue
            url = config[key]
            parsed = urlparse(url)
            if parsed.scheme == 'ftp':
                # FTP targets: log in and verify the file is listed.
                ftp = FTP(parsed.hostname)
                ftp.login(parsed.username, parsed.password)
                ftp.cwd(os.path.dirname(parsed.path))
                if os.path.basename(parsed.path) not in ftp.nlst():
                    print('404 %s' % url)
                ftp.quit()
            else:
                # HTTP(S) targets: HEAD first, GET only when HEAD is refused.
                try:
                    arguments = {}
                    if parsed.username:
                        url = '%s://%s%s' % (parsed.scheme, parsed.hostname, parsed.path)
                        arguments['auth'] = (parsed.username, parsed.password)
                    response = requests.head(url, headers=headers, **arguments)
                    if response.status_code == 405:  # if HEAD requests are not allowed
                        response = requests.get(url, headers=headers, **arguments)
                    if response.status_code != 200:
                        print('%d %s' % (response.status_code, url))
                except requests.exceptions.ConnectionError:
                    print('404 %s' % url)
def pag_from_ftp(local_dir,server_dir = ftp_dir):
    """Download every file found in *server_dir* on the module-level FTP
    server into *local_dir*.

    NOTE(review): relies on the module globals ftp_host / ftp_user /
    ftp_passwd / ftp_dir being defined before this is called.
    """
    # Log in
    ftp = FTP(ftp_host)
    ftp.set_debuglevel(2)  # enable verbose protocol debug logging
    ftp.login(ftp_user,ftp_passwd)
    # Switch to the working directory
    print '>>>> get files from dir: %s' % server_dir
    ftp.cwd(server_dir)
    # Download the files under that directory
    f_list = ftp.nlst()
    if len(f_list) > 0:
        os.chdir(local_dir)
        print 'change to local store dir %s' % local_dir
        for f in f_list:
            print 'down file %s from ftp server' % f
            #fpath = '%s%s' % (server_dir,f)
            f_local = open(os.path.basename(f),'wb')
            # f_local.write is the sink callback for the transferred chunks
            ftp.retrbinary('RETR %s' % f,f_local.write)
            f_local.close()
    # Finally, do not forget to close the open connection
    ftp.close()
def fetch_files(date):
    """Fetch every remote file whose name contains date[2:8] (YYMMDD) from the
    module-configured FTP server into *zipdir*; warn about expected files that
    are missing.  Returns the set of local paths that were actually written."""
    ftp = FTP(server, usr, pwd)
    logger.info('FTP server connected')
    ftp.cwd(ftpdir)
    _files = []
    ftp.retrlines("LIST", _files.append)
    files = []
    for _file in _files:
        # last whitespace-separated token of a LIST line is the file name
        _file = _file.strip().split()[-1]
        if _file.find(date[2:8]) != -1:
            files.append(_file)
            logger.info('Found file {}', _file)
    _idfile = idfile % date[2:8]
    if _idfile not in files:
        logger.warning('{} not found', _idfile)
    for file in map(lambda x: x % date[2:8], [Dfile1, Dfile2, Sfile1, Sfile2]):
        if file not in files:
            logger.warning('{} not found', file)
    zips = set()
    for file in files:
        # renamed from `zip` -- the original shadowed the builtin and raised
        # NameError in zips.add() if the very first download failed
        zip_path = os.path.join(zipdir, file)
        try:
            # BUG FIX: binary downloads need mode 'wb'; text mode 'w' raises
            # TypeError on the bytes chunks retrbinary delivers (Python 3).
            with open(zip_path, 'wb') as fh:
                ftp.retrbinary("RETR " + file, fh.write)
            logger.info('{} fetched and stored as {}', file, zip_path)
            # BUG FIX: only record paths that were actually fetched; the
            # original added the path even when the transfer failed.
            zips.add(zip_path)
        except Exception:
            logger.error('Failed to fetch file {}', file)
    return zips
def get_zip_file_from_ftp_server(server, directory, filename):
    """Download *filename* from *directory* on *server* into the current
    working directory and return the file name."""
    # BUG FIX: retrbinary delivers bytes, so the local file must be opened in
    # binary mode; text mode 'w' fails on Python 3 and mangles data on Windows.
    with open(filename, "wb") as f:
        ftp = FTP(server)
        ftp.login(passwd="nothing", user="******")
        ftp.cwd(directory)
        ftp.retrbinary('RETR %s' % filename, f.write)
        # BUG FIX: the connection was never closed in the original.
        ftp.quit()
    return filename
def get_files():
    """Recreate *local_directory* and download every *.mp3 file from
    thecharlieforce.com:/web/audio into it."""
    global local_directory
    import os, shutil, fnmatch
    # start from an empty local directory
    if os.path.exists(local_directory):
        shutil.rmtree(local_directory)
    if not os.path.exists(local_directory):
        os.makedirs(local_directory)
    # BUG FIX: the original read "from ftplib import FTP." (trailing period),
    # which is a SyntaxError.
    from ftplib import FTP
    ftp = FTP('thecharlieforce.com')
    ftp.login('', '')
    ftp.cwd('web/audio')
    filenames = ftp.nlst()
    for filename in filenames:
        if fnmatch.fnmatch(filename, '*.mp3'):
            local_filename = os.path.join(local_directory, filename)
            # with-block closes the handle even if the transfer fails
            with open(local_filename, 'wb') as fh:
                ftp.retrbinary('RETR ' + filename, fh.write)
    ftp.quit()
def connect_ftp():
    """Open a connection to the module-configured FTP server, log in, and
    return the ready-to-use FTP handle."""
    connection = FTP()
    connection.connect(SERVER, PORT)
    connection.login(USER, PASS)
    connection.cwd('')  # folder name if any else delete this line
    return connection
def strmFile(self, i):
    """Write a Kodi .strm library file for the movie described by dict *i*
    (keys: name, title, year, imdb, tmdb).  When the library folder is an
    ftp:// URL, ensure the remote directory exists first.

    NOTE(review): all failures are deliberately swallowed (best-effort).
    """
    try:
        name, title, year, imdb, tmdb = i['name'], i['title'], i['year'], i['imdb'], i['tmdb']
        sysname, systitle = urllib.quote_plus(name), urllib.quote_plus(title)
        # strip characters that are illegal in file/folder names
        transname = name.translate(None, '\/:*?"<>|').strip('.')
        # plugin-style playback URL stored inside the .strm file
        content = '%s?action=play&name=%s&title=%s&year=%s&imdb=%s&tmdb=%s' % (sys.argv[0], sysname, systitle, year, imdb, tmdb)
        control.makeFile(self.library_folder)
        folder = os.path.join(self.library_folder, transname)
        control.makeFile(folder)
        try:
            # Only relevant when the library lives on an FTP share; the raise
            # is used as a cheap early exit for local folders.
            if not 'ftp://' in folder: raise Exception()
            from ftplib import FTP
            # parse user:pass@host[:port]/path out of the folder URL
            ftparg = re.compile('ftp://(.+?):(.+?)@(.+?):?(\d+)?/(.+/?)').findall(folder)
            ftp = FTP(ftparg[0][2],ftparg[0][0],ftparg[0][1])
            # create the remote directory if cwd into it fails
            try: ftp.cwd(ftparg[0][4])
            except: ftp.mkd(ftparg[0][4])
            ftp.quit()
        except:
            pass
        stream = os.path.join(folder, transname + '.strm')
        file = control.openFile(stream, 'w')
        file.write(str(content))
        file.close()
    except:
        pass
class ServerOpenDialog(QtGui.QDialog):
    """Modal dialog that lists the files available under /Item on the
    chronometry.ca FTP server and lets the user pick one from a combo box."""
    def __init__(self, parent=None):
        super(QtGui.QDialog, self).__init__(parent)
        self.Combo = QtGui.QComboBox()
        buttonBox = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
        buttonBox.accepted.connect(self.accept)
        buttonBox.rejected.connect(self.reject)
        # FTP(host, user, password) connects and logs in in one call.
        self.ftp = FTP("ftp.chronometry.ca", "*****@*****.**", "DbWp5RWRd3uC")
        self.ftp.cwd('/')
        self.ftp.cwd('/Item')
        comboList = self.ftp.nlst()
        self.ftp.close()
        # NOTE(review): the two pops assume the first nlst() entries are "."
        # and ".." -- not every server returns those; confirm against the
        # actual server before relying on this.
        comboList.pop(0)
        comboList.pop(0)
        for item in comboList:
            self.Combo.addItem(item)
        mainLayout = QtGui.QVBoxLayout()
        mainLayout.addWidget(self.Combo)
        mainLayout.addWidget(buttonBox)
        self.setLayout(mainLayout)
        self.setWindowTitle("Open Item Data from Server")
def cleanftpbkfile(server,port,name,password,ftppath,savedays=5):
    """Delete backup files on the FTP server whose names contain the YYYYMMDD
    stamp from *savedays* days ago, and return a list of status messages.

    NOTE(review): only the single day-stamp is matched, not "everything
    older" -- run daily so nothing is skipped.
    """
    now = datetime.datetime.now()
    yetsavedays = (now - datetime.timedelta(savedays)).strftime('%Y%m%d')
    dellist = []
    ftph = FTP()
    try:
        ftph.connect(server, port, 10)
        ftph.login(name, password)
    except Exception:
        ftph.close()
        dellist.append('ftp can not connect')
        return dellist
    try:
        ftph.cwd(ftppath)
    except Exception:
        ftph.close()
        dellist.append('ftp backupdir is not find')
        # BUG FIX: the original fell through here and called nlst() on the
        # closed connection, raising instead of reporting the error.
        return dellist
    filelist = ftph.nlst()
    for i in filelist:
        if yetsavedays in i:
            try:
                ftph.delete(i)
                dellist.append(ftppath + '\\' + i + ' deleted')
            except Exception:
                dellist.append(ftppath + '\\' + i + ' delete failed')
    # close the connection on the success path as well
    ftph.close()
    return dellist
def ftp_up(filename, hostname, port, uname, passwd, cwd):
    """Upload *filename* into directory *cwd* on the given FTP server,
    creating the remote directory path level by level if needed.

    Returns (True, None) on success or (False, sys.exc_info()) on failure.
    """
    socket.setdefaulttimeout(60)  # global FTP timeout: 60 seconds
    try:
        ftp = FTP()
        ftp.set_debuglevel(0)  # 2 = verbose protocol trace; 0 = silent
        ftp.connect(hostname, port, 10)
        ftp.login(uname, passwd)  # empty strings for anonymous login
        # print ftp.getwelcome()  # show the FTP server welcome message
        # Create each level of the target path; mkd fails harmlessly when the
        # directory already exists.
        tmps = cwd.split("/")
        tmpcwd = ""
        for tmp in tmps:
            tmpcwd += tmp + "/"
            try:
                ftp.mkd(tmpcwd)
            except Exception:
                pass
        ftp.cwd(cwd)  # switch to the target directory 'xxx/xxx/'
        bufsize = 1024  # transfer block size
        # BUG FIX: the original leaked the local file handle when storbinary
        # raised; the with-block closes it on every path.
        with open(filename, 'rb') as file_handler:
            ftp.storbinary('STOR %s' % os.path.basename(filename), file_handler, bufsize)
        ftp.quit()
    except Exception:
        return False, sys.exc_info()
    return True, None
def run_ftp(tdir, dlist, ftp_address=ftp_site, outdir='./'):
    """
    retrieve files from ftp site
    input:  tdir        --- location of ftp file under "ftp_site"
            dlist       --- a list of file names you want to retrieve
            ftp_address --- ftp address, default: ftp_site (see top of script)
            outdir      --- directory to deposit files into, default: './'
    output: retrieved files in outdir
            count       --- the number of files retrieved
    """
    # open ftp connection
    ftp = FTP(ftp_address)
    ftp.login('anonymous', '*****@*****.**')
    ftp.cwd(tdir)
    # check through the data
    count = 0
    for file in dlist:
        local_file = os.path.join(outdir, file)
        try:
            # BUG FIX: open(...).write leaked the handle; the with-block
            # flushes and closes it before the size check below.
            with open(local_file, 'wb') as fh:
                ftp.retrbinary('RETR %s' % file, fh.write)
            count += 1
        except Exception:
            pass
        # Remove empty (failed / zero-byte) downloads.
        # BUG FIX: guard with exists() -- a failed open left no file at all
        # and the original os.stat() raised an uncaught error.
        if os.path.exists(local_file) and os.stat(local_file).st_size == 0:
            mcf.rm_file(local_file)
    ftp.quit()
    return count
class Connection(object):
    """Thin FTP wrapper configured from a ConfigParser [FTP] section.

    When *dry_run* is true, write_file only logs what it would do.
    """

    def __init__(self, config, dry_run=False):
        self.client = FTP(
            config.get("FTP", "host"),
            config.get("FTP", "user"),
            config.get("FTP", "password"))
        self.dry_run = dry_run

    def write_file(self, local_path, remote_path):
        """Upload *local_path* into *remote_path*, creating the remote
        directory first when it does not exist yet."""
        filename = os.path.basename(local_path)
        try:
            logging.debug("Changing remote directory to %s", remote_path)
            self.client.cwd(remote_path)
        except ftplib.error_perm:
            if not self.dry_run:
                # BUG FIX: the stdlib logging module has no notice() method;
                # the original raised AttributeError on this path.
                logging.info("Creating remote directory %s", remote_path)
                self.client.mkd(remote_path)
            else:
                # warn() is a deprecated alias for warning()
                logging.warning("Remote path %s does not exist and will be created on write", remote_path)
        # Just test that we can perform the necessary operations
        # without writing anything
        logging.info(
            "%s writing %s to %s",
            "Not" if self.dry_run else "", local_path, remote_path)
        if self.dry_run:
            return
        with open(local_path, "rb") as f:
            self.client.storbinary("STOR %s" % filename, f)
def upload():
    """Upload LOCAL_FILE into REMOTE_DIR on the configured FTP server,
    reporting progress and errors via debug_print and `echo` (module
    globals); exits the process on any failure."""
    try:
        con = FTP(SERVER, USER, PASSWORD, USER, TIMEOUT)
    except Exception:
        debug_print("cant connect to %s@%s" % (SERVER, USER))
        os.system("echo error: %s" % str(sys.exc_info()[1]))
        exit()
    debug_print("connected to %s@%s" % (SERVER, USER))
    try:
        con.cwd(REMOTE_DIR)
    except Exception:
        debug_print("cant change folder to %s" % (REMOTE_DIR))
        os.system("echo error: %s" % str(sys.exc_info()[1]))
        exit()
    # message typo fixed ("to to" -> "to")
    debug_print("changed folder to %s" % (REMOTE_DIR))
    remote_name = LOCAL_FILE.split("/")[-1]
    try:
        # BUG FIX: the original never closed the local file handle; the
        # with-block releases it whether or not the transfer succeeds.
        with open(LOCAL_FILE, "rb") as fh:
            con.storbinary("STOR %s" % (remote_name), fh)
    except Exception:
        debug_print("upload failed")
        os.system("echo error: %s" % str(sys.exc_info()[1]))
        exit()
    debug_print("successfully uploaded")
def backup(**kwargs):
    """Dump and gzip the MySQL database, then (unless the FTP host is local)
    upload the archive into the configured FTP server's root directory.

    Keyword arguments: dbname, ftphost, ftpport, ftpuser, ftppwd.
    """
    import sh
    bakdir = "/var/toughradius/databak"
    if not os.path.exists(bakdir):
        os.mkdir(bakdir)
    now = datetime.now()
    dbname = kwargs.pop('dbname', 'toughradius')
    ftphost = kwargs.pop('ftphost', '127.0.0.1')
    ftpport = kwargs.pop('ftpport', 21)
    ftpuser = kwargs.pop('ftpuser', '')
    ftppwd = kwargs.pop('ftppwd', '')
    backfile = '%s/%s-backup-%s.gz' % (bakdir, dbname, now.strftime("%Y%m%d"))
    # pipe mysqldump straight into gzip
    sh.gzip(sh.mysqldump(u='root', B=dbname, S="/var/toughradius/mysql/mysql.sock"), '-cf', _out=backfile)
    if '127.0.0.1' not in ftphost:
        ftp = FTP()
        ftp.set_debuglevel(2)  # verbose protocol trace during the upload
        ftp.connect(ftphost, ftpport)
        ftp.login(ftpuser, ftppwd)
        ftp.cwd('/')
        bufsize = 1024
        # BUG FIX: the file handle leaked when storbinary raised; the
        # with-block closes it on every path.
        with open(backfile, 'rb') as file_handler:
            ftp.storbinary('STOR %s' % os.path.basename(backfile), file_handler, bufsize)
        ftp.set_debuglevel(0)
        ftp.quit()
def download_ftp(self, dirname, recurse, user, password, connection=None):
    """Mirror *dirname* from ftp.dom.se (reusing *connection* when given),
    storing each new file via self.store and post-processing it as a
    zipfile; directories are descended when *recurse* is true."""
    self.log.debug('Listing contents of %s' % dirname)
    lines = []
    if not connection:
        connection = FTP('ftp.dom.se')
        connection.login(user, password)
    connection.cwd(dirname)
    connection.retrlines('LIST', lines.append)
    for line in lines:
        parts = line.split()
        filename = parts[-1].strip()
        if line.startswith('d') and recurse:
            # NOTE(review): descends via self.download, not download_ftp --
            # confirm that is the intended recursion entry point.
            self.download(filename, recurse)
        elif line.startswith('-'):
            basefile = os.path.splitext(filename)[0]
            if dirname:
                basefile = dirname + "/" + basefile
            localpath = self.store.downloaded_path(basefile)
            if os.path.exists(localpath) and not self.config.force:
                pass  # we already got this
            else:
                util.ensure_dir(localpath)
                self.log.debug('Fetching %s to %s' % (filename, localpath))
                # BUG FIX (resolves the old FIXME): open(...).write leaked
                # the handle; close it deterministically with a with-block.
                with open(localpath, 'wb') as fh:
                    connection.retrbinary('RETR %s' % filename, fh.write)
                self.process_zipfile(localpath)
    connection.cwd('/')
def update(this, last_update):
    """Check the FTP share for components newer than *last_update* (an MDTM
    timestamp string), download any that changed, relaunch server.exe, and
    return the latest layout timestamp ("ErrC" when the login fails)."""
    connection = FTP(FTP_ADRESS, timeout = 4)
    resp = connection.login(FTP_USER, FTP_PASSWORD)
    # "230" is the FTP reply code for a successful login
    if (resp[0:3] != "230"):
        print("FTP CONNECTION ERROR")
        return "ErrC"
    connection.cwd("shrd")
    curDir = p.dirname(sys.argv[0]) + "\\"  # Windows-style path of this script
    # MDTM replies look like "213 YYYYMMDDHHMMSS"; [4:18] is the timestamp,
    # compared lexicographically (valid for fixed-width numeric strings)
    if connection.sendcmd("MDTM core.exe")[4:18] > last_update:
        cFile = open(curDir + "core.exe", "wb")  # Updating core
        connection.retrbinary("RETR core.exe", cFile.write)
        cFile.close()
    if connection.sendcmd("MDTM server.exe")[4:18] > last_update:
        cFile = open(curDir + "server.exe", "wb")  # Updating server
        connection.retrbinary("RETR server.exe", cFile.write)
        cFile.close()
    latest_version = connection.sendcmd("MDTM layout")[4:18]
    if latest_version > last_update:
        # refresh the local "viruses" folder from the server
        if p.exists(curDir + "viruses"):
            shutil.rmtree(curDir + "viruses")
        os.makedirs(curDir + "viruses")
        connection.cwd("viruses")
        for vir in connection.nlst():
            if vir[0] != '.':  # skip "." / ".." style entries
                cFile = open(curDir + "viruses\\" + vir, "wb")
                connection.retrbinary("RETR " + vir, cFile.write)
                cFile.close()
        os.system('"' + curDir + "server.exe" + '"')
        #shutil.rmtree(curDir + "viruses")
    connection.quit()
    return latest_version
def ftp_putfile(self, filename, to='upload', as_filename=None, move_to=None):
    """Upload local *filename* (as text lines) into remote directory *to*,
    optionally storing it under *as_filename* and then renaming it into
    *move_to*.

    Returns the STOR response string, or None when the local file is
    missing.  Raises ValueError when self.ftp_url is not configured.
    """
    result = None
    if not os.path.isfile(filename):
        logger.error("ftp_putfile: {0} not found.".format(filename))
        return result
    with open(filename, 'r') as f:
        if self.ftp_url:
            ftp = FTP(self.ftp_url)
        else:
            raise ValueError("Engage.ftp_putfile() requires ftp_url be set.")
        ftp.login(self.username, self.password)
        ftp.cwd(to)
        # default remote name: basename of the local path (os.path.basename
        # replaces the hand-rolled rfind('/') slicing and also handles
        # Windows separators)
        fname = as_filename if as_filename else os.path.basename(filename)
        result = ftp.storlines('STOR ' + fname, f)
        logger.debug("ftp_putfile: stored {0}{1} ({2})".format(filename, " as {0}".format(fname) if as_filename else "", result))
        if move_to:
            rfrom = os.path.join(to, fname)
            rto = os.path.join(move_to, fname)
            rename = ftp.rename(rfrom, rto)
            logger.debug("ftp_putfile: moved {0} to {1} ({2})".format(rfrom, rto, rename))
        ftp.quit()
    return result
def ftp_down(dfrom,localf,filename = "histWhcj.txt"):
    """Download *filename* from the <MMDD>_1m/ directory on the hard-coded
    FTP server and save it as <localf>.csv inside *dfrom*.

    Returns 1 on success, 0 on any failure (the exception is printed).
    """
    try:
        ftp=FTP()
        ftp.set_debuglevel(2)  # verbose protocol trace
        ftp.connect('120.26.89.143','8021')
        # pdb.set_trace()
        # print '11111'
        ftp.login('el','104104')
        #print ftp.getwelcome()  # show the FTP server welcome message
        lf = str(localf)
        ml = lf[-4:]  # last four chars of localf select the remote MMDD dir
        ftp.cwd(ml+'_1m/')  # choose the working directory
        # ftp.cwd(ml+'_1m(all_40cid)/')  # alternative working directory
        #0324_1m(all_40cid)
        bufsize = 1024
        localname = lf+".csv"
        localname = open(os.path.join(dfrom,localname),'wb')
        file_handler =localname.write  # write callback for the local file
        # receive the remote file and write it into the local file
        ftp.retrbinary('RETR %s' % os.path.basename(filename),file_handler,bufsize)
        ftp.set_debuglevel(0)
        localname.close()
        ftp.quit()
        print "ftp down OK"
        return 1
    except Exception,e:
        print str(e)
        return 0
def download_test_binaries(self):
    """Locate the latest *tests.zip on Mozilla's nightly FTP for the target
    architecture, download it, and unzip the SUT agent / Watcher APKs into
    the working directory."""
    lines = []
    zipname = None
    host = "ftp.mozilla.org"
    # architecture-specific nightly directory
    if self.is_arm_target():
        path = 'pub/mobile/nightly/latest-mozilla-central-android'
    else:
        path = 'pub/mobile/nightly/latest-mozilla-central-android-x86'
    ftp = FTP(host)
    ftp.login()
    ftp.cwd(path)
    ftp.retrlines('NLST', lambda x: lines.append(x.strip()))
    for line in lines:
        if line.endswith("tests.zip"):
            zipname = line
            break
    # BUG FIX: identity comparison with None must use "is", not "==".
    if zipname is None:
        self.fatal("unable to find *tests.zip at ftp://%s/%s" % (host, path))
    url = "ftp://%s/%s/%s" % (host, path, zipname)
    self.download_file(url, file_name=zipname, parent_dir=self.workdir)
    self.run_command(["unzip", zipname, "bin/sutAgentAndroid.apk", "bin/Watcher.apk"],
                     cwd=self.workdir, halt_on_failure=True)
def get_PDB_universe(pdbcode):
    """Fetch the gzipped PDB entry *pdbcode* from ftp.wwpdb.org, unzip it in
    memory, and return an MDAnalysis Universe built from it."""
    # Get file from PDB
    filename = 'pdb'+str(pdbcode)+'.ent.gz'
    ftp = FTP('ftp.wwpdb.org')
    ftp.login()
    ftp.cwd('pub/pdb/data/structures/all/pdb')
    gzipfile = StringIO()  # buffer for retrieved file
    ftp.retrbinary('RETR {}'.format(filename), gzipfile.write)
    ftp.quit()
    # unzip PDB file
    gzipfile.seek(0)  # reset StringIO object for reading
    with gzip.GzipFile(pdbcode, 'rb', 0, gzipfile) as unzipper:
        pdbcontent = unzipper.read()
    gzipfile.close()
    # Is there no way to create a Universe directly from a text object?
    # NOTE(review): GzipFile.read() returns bytes on Python 3 while this temp
    # file is opened in text mode ('w'); the StringIO buffer suggests this
    # targets Python 2 -- confirm before porting.
    fh = tempfile.NamedTemporaryFile(suffix='.pdb', mode='w')
    fh.write(pdbcontent)
    fh.flush()
    # create universe
    u = mda.Universe(fh.name)
    # clean up and return
    fh.close()
    return u
def updateScores(self, name, score):
    """
    Update the best-scores table with the given result and persist it:
    download the current scores file from the FTP server, merge the new
    score into the top five, rewrite the local file, upload it back, then
    refresh the on-screen labels.
    """
    ftp = FTP('flapflap.cba.pl')
    ftp.login("*****@*****.**", "flap")
    ftp.cwd('FlappyScores')
    # fetch the server's current score file for this difficulty
    f = open("./resources/bestScores_" + str(self.parent.difficulty) + ".txt", "wb")
    ftp.retrbinary("RETR bestScores_" + str(self.parent.difficulty) + ".txt", f.write)
    f.close()
    self.readScores()
    # insert the new score into the top-5, shifting lower entries down
    for i in range(0,5):
        if self.values[i]< score:
            for j in range(4,i, -1):
                self.values[j] = self.values[j-1]
                self.names[j] = self.names[j-1]
            self.values[i] = score
            self.names[i] = name
            break
    # rewrite the local score file ("name,score" per line)
    f = open("./resources/bestScores_" + str(self.parent.difficulty) + ".txt", "w")
    for i in range(0,5):
        line = self.names[i] + "," + str( self.values[i]) + "\n"
        f.write(line)
    f.close()
    # upload the merged file back to the server
    f = open("./resources/bestScores_" + str(self.parent.difficulty) + ".txt", "rb")
    ftp.storbinary("STOR bestScores_" + str(self.parent.difficulty) + ".txt", f)
    f.close()
    ftp.quit()
    self.readScores()
    # refresh the on-screen labels
    for i in range(0,5):
        self.nameLabels[i].setText(self.names[i])
        self.scoreLabels[i].setText(str(self.values[i]))
    self.level.setText("Poziom trudnosci: \n" + self.levelLabels[str(self.parent.difficulty)])
    self.show()
def ftp(self, ip, usr, passwd, rDir):
    """Check that an FTP server at *ip* accepts the given credentials and
    that directory *rDir* exists on it.

    Returns True when both checks succeed, False otherwise.
    """
    self.printIf("ftp: Check remote connection started...")
    server = str(ip)
    user = str(usr)
    passwd = str(passwd)
    remPath = str(rDir)
    try:
        ftp = FTP(server)
        ftp.login(user, passwd)
        con = True
    except Exception:
        con = False
    # BUG FIX: the original quit the connection unconditionally after the
    # cwd check and then quit a SECOND time on success; when the connection
    # itself failed, `ftp` was unbound and ftp.quit() raised NameError; and
    # the cwd-failure path returned without closing at all.
    if not con:
        return False
    try:
        ftp.cwd(remPath)
        remp = True
    except Exception:
        remp = False
    ftp.quit()
    return remp
def download(state: str, year: int, month: int, cache: bool = True) -> object:
    """
    Download SIH records for state, year and month and return a dataframe.

    :param month: 1 to 12
    :param state: 2 letter state code
    :param year: 4 digit integer
    :param cache: when True, persist the fetched dataframe as parquet
    """
    state = state.upper()
    year2 = int(str(year)[-2:])
    month = str(month).zfill(2)
    if year < 1992:
        # BUG FIX: the message claimed 1994 while the check uses 1992.
        raise ValueError("SIH does not contain data before 1992")
    # BUG FIX: the pre-2008 branch built the filename with the unpadded
    # two-digit year (e.g. "RDSP312.dbc" for 2003); DATASUS names always use
    # a zero-padded two-digit year, so pad in both cases.
    fname = 'RD{}{}{}.dbc'.format(state, str(year2).zfill(2), month)
    # Serve from the local cache BEFORE touching the FTP server; the original
    # logged in first, wasting a connection on every cache hit.
    cachefile = os.path.join(CACHEPATH, 'SIH_' + fname.split('.')[0] + '_.parquet')
    if os.path.exists(cachefile):
        return pd.read_parquet(cachefile)
    ftp = FTP('ftp.datasus.gov.br')
    ftp.login()
    ftype = 'DBC'
    if year < 2008:
        ftp.cwd('/dissemin/publicos/SIHSUS/199201_200712/Dados')
    else:
        # (removed a no-op .format(year) call on a placeholder-free string)
        ftp.cwd('/dissemin/publicos/SIHSUS/200801_/Dados')
    df = _fetch_file(fname, ftp, ftype)
    if cache:
        df.to_parquet(cachefile)
    return df
def ftp_down(ftp_server, ftp_port, username, password, ftp_dir, filename):
    """
    :param ftp_server: FTP server IP
    :param ftp_port: FTP server port
    :param username: login user name
    :param password: login password
    :param ftp_dir: remote directory to enter
    :param filename: name of the file to download
    :return: 'success' on success, otherwise the formatted traceback string
    """
    try:
        ftp = FTP()
        ftp.set_debuglevel(0)
        ftp.connect(ftp_server, ftp_port)
        ftp.login(username, password)
        ftp.cwd(ftp_dir)
        buffer_size = 1024
        # BUG FIX: the local handle leaked when retrbinary raised; the
        # with-block closes it on every path.
        with open(filename, 'wb') as file_handler:
            ftp.retrbinary('RETR %s' % os.path.basename(filename),
                           file_handler.write, buffer_size)
        ftp.quit()
        # NOTE(review): `timer` is a module-level global that must expose
        # stop(); confirm it is initialised before this call.
        print(timer.stop())
        return 'success'
    except Exception:
        return traceback.format_exc()
def admin():
    """Flask admin view: validate the ItemForm, upload the item image to the
    FTP server's public_html/productimages folder, persist the new Item, and
    redirect back to /admin (re-rendering the form on validation failure)."""
    form = ItemForm()
    newone = Item()
    if form.validate_on_submit():
        newone.category = form.category.data[0]
        print "!!!", form.category.data
        newone.name = form.name.data
        newone.description = form.description.data
        newone.price = float(form.price.data)
        newone.dimensions = form.dimensions.data
        ftp = FTP(FTP_HOST)
        ftp.login(FTP_USER, FTP_PASS)
        ftp.cwd('public_html/productimages')
        #if request.method == 'POST':
        file = request.files['file']
        if file:
            filename = secure_filename(file.filename)
            # save locally first, then push the saved copy to the FTP host
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            print app.config['UPLOAD_FOLDER']
            ftp.storbinary('STOR '+filename, open('./static/balls/'+filename, 'rb'))
            newone.image = filename
        db.session.add_all([newone])
        db.session.commit()
        flash('Item succesfully added !')
        return redirect('/admin')
    else:
        # NOTE(review): this branch also fires on plain GET requests, so the
        # error flash shows before the user has submitted anything.
        flash('Please make sure all info is filled out')
    return render_template('admin.html', form=form)
exit() print("Connected...") print(ftp.getwelcome()) ftp.login() notFinished = True printMenu() while (notFinished): command = input() if command == "1": ftp.retrlines("LIST") elif command == "2": path = input("Enter the path to change to:\n") try: ftp.cwd(path) print("Path changed to {}".format(path)) except: print("An error occured changing to entered directory.") elif command == "3": name = input("Enter the name of a file to remove:\n") try: ftp.delete(name) print("File '{}' was successfully deleted".format(name)) except: print("An error occured deleting the file") elif command == "4": name = input("Enter directory name:\n") try: ftp.mkd(name) print("Directory '{}' was successfully created".format(name))
class ftpClient:
    """Small FTP helper: login, directory tests, uploads, and utilities for
    downloading/renaming time-stamped daily exchange files."""

    # =====================================================================
    # FTP address, user name and password
    # =====================================================================
    def __init__(self, ftp_server, user, password):
        self.ftp_server = ftp_server
        self.user = user
        self.password = password

    # =====================================================================
    # Log in to the FTP server
    # =====================================================================
    def login(self):
        """Connect and log in; return True on success."""
        try:
            self.ftp = FTP(self.ftp_server)
        except (socket.error, socket.gaierror):
            print("Fail to connect ftp server " + self.ftp_server + "!")
            return False
        try:
            self.ftp.login(self.user, self.password)
            print(self.ftp.getwelcome())
            return True
        except (ftplib.error_perm):
            print("failed to login ftp server " + self.ftp_server + \
                  " with user name \"" + self.user + "\"")
            return False

    # =====================================================================
    # Test whether a given path is a directory on the server
    # =====================================================================
    def is_directory(self, server_path):
        """Return True when *server_path* is a directory (probed by cwd-ing
        into it and back)."""
        server_path = server_path.strip()
        if server_path == "/":
            return True
        cur_path = self.ftp.pwd()
        try:
            self.ftp.cwd(server_path)
        except (ftplib.error_perm):
            return False
        try:
            self.ftp.cwd(cur_path)
        except (ftplib.error_perm):
            return False
        return True

    # =====================================================================
    # Upload the listed local files into the given server directory
    # =====================================================================
    def upload(self, server_path, lst_files):
        """Upload every file in *lst_files* into *server_path*; return True
        when all files were stored."""
        # empty list: nothing to do
        if not len(lst_files):
            print("No file specified to upload!")
            return True
        # verify that every local file exists before starting
        for item in lst_files:
            item = item.strip()
            if not os.path.exists(item):
                print("file " + item + " not exists!")
                return False
        # switch to the server-side target directory
        server_path = server_path.strip()
        if server_path:
            try:
                self.ftp.cwd(server_path)
            except (ftplib.error_perm):
                print("Failed to change ftp server path ftp " + server_path + "!")
                return False
        cur_local_path = os.getcwd()
        for item in lst_files:
            item = item.strip()
            # switch to the file's local directory, then upload it
            local_path, local_file = os.path.split(item)
            if local_path != "" and local_path != ".":
                os.chdir(local_path)
            # BUG FIX: the original never closed file_handle; the with-block
            # closes it on success and failure alike.
            with open(local_file, "rb") as file_handle:
                try:
                    self.ftp.storbinary('STOR ' + local_file, file_handle, 1024)
                except (ftplib.error_perm):
                    print("Failed to upload file \"" + item + "\" to server path \"" + \
                          server_path + "\"!")
                    return False
            # restore the working directory after EACH file so relative
            # paths in later items resolve correctly
            os.chdir(cur_local_path)
        print(str(len(lst_files)) + " files uploaded onto ftp server " + self.ftp_server + "!")
        return True

    # =====================================================================
    # Download the most recently uploaded files into the current directory
    # =====================================================================
    def download_newest_files(self):
        """Download the latest upload batch (files stamped HH-MM-SS_*) from
        the newest dated directory, restoring their original names."""
        download_path = self.get_newest_dir()
        if not download_path:
            print("No files to download!")
            return
        self.ftp.cwd(download_path)
        lst_files = self.ftp.nlst()
        lst_newest_files = []
        max_sum = 0
        # keep only the files carrying the largest HH-MM-SS stamp
        for item in lst_files:
            match = re.search(r'(\d+)\-(\d+)\-(\d+)_.*', item)
            if not match:
                continue
            hour = int(match.group(1))
            minute = int(match.group(2))
            second = int(match.group(3))
            stamp = hour * 3600 + minute * 60 + second
            if stamp < max_sum:
                continue
            if stamp > max_sum:
                # Python 2.7 compatible list clear (no list.clear())
                del lst_newest_files[:]
                max_sum = stamp
            lst_newest_files.append(item)
        if not lst_newest_files:
            print("no newest upload files found!")
            return
        # download the files from the last upload
        download_num = 0
        for item in lst_newest_files:
            try:
                # BUG FIX: close the handle in a with-block even when
                # retrbinary raises.
                with open(item, 'wb') as file_handle:
                    self.ftp.retrbinary("RETR " + item, file_handle.write, 1024)
                origin_name = self.recovery_name(item)
                if origin_name:
                    if os.path.exists(origin_name):
                        os.remove(origin_name)
                    os.rename(item, origin_name)
                    download_num += 1
                    print("file " + origin_name + " downloaded.")
                else:
                    print("Error: error origin name for " + item)
                    continue
            except (ftplib.error_perm):
                print("Failed to download file " + item + "!")
                continue
        print("all " + str(len(lst_newest_files)) + " files " + str(download_num) + " download.")
        return

    # =====================================================================
    # Create a directory with the given name on the server
    # =====================================================================
    def mkdir(self, dir_name):
        """Create *dir_name* on the server (no-op when it already exists)."""
        if self.is_directory(dir_name):
            return True
        try:
            self.ftp.mkd(dir_name)
        except (ftplib.error_perm):
            print("Error: Failed to mkdir " + dir_name + "!")
            return False
        return True

    # =====================================================================
    # Rename (time-stamp prefix) the given files on the server
    # =====================================================================
    def rename(self, upload_path, lst_newest_files):
        """Prefix every file in *lst_newest_files* (under /upload_path) with
        the current stamp from get_file_prefix()."""
        file_prefix = get_file_prefix()
        if upload_path:
            try:
                self.ftp.cwd("/" + upload_path)
            except (ftplib.error_perm):
                print("Error: failed to change ftp server path to " + upload_path + "!")
                return False
        for item in lst_newest_files:
            self.ftp.rename(item, file_prefix + "_" + item)
        return True

    # =====================================================================
    # Recover the original file name (strip the HH-MM-SS_ prefix)
    # =====================================================================
    def recovery_name(self, file_name):
        """Return the name without its leading HH-MM-SS_ stamp, or None when
        the pattern does not match."""
        match = re.search(r'\d+\-\d+\-\d+_(\S+)', file_name)
        if match:
            return match.group(1)

    # =====================================================================
    # Quit the FTP session
    # =====================================================================
    def quit(self):
        """Log out from the server; return True on success."""
        try:
            self.ftp.quit()
        except (ftplib.error_perm):
            print("Error: failed to quit from ftp server " + self.ftp_server + "!")
            return False
        print("Quit from ftp server " + self.ftp_server + "!")
        return True

    # =====================================================================
    # Get the name of the newest dated directory in the server root
    # =====================================================================
    def get_newest_dir(self):
        """Return the name (ISO date) of the newest YYYY-MM-DD directory in
        the server root, or "" when none exists."""
        newest_dir = ""
        try:
            lst_dirs_info = []
            self.ftp.cwd("/")
            self.ftp.retrlines('LIST', lst_dirs_info.append)
            if not lst_dirs_info:
                return ""
            # drwxrwxrwx 1 user group 0 Aug 14 06:49 2018-08-13
            re_dir = re.compile(r'^.*\s+(\d+)\-(\d+)\-(\d+)\s*$')
            max_date = date(1970, 1, 1)
            for item in lst_dirs_info:
                match = re_dir.search(item)
                if not match:
                    continue
                cur_date = date(int(match.group(1)), int(match.group(2)), int(match.group(3)))
                if cur_date > max_date:
                    max_date = cur_date
                    newest_dir = max_date.isoformat()
        except Exception:
            # BUG FIX: the original wrote "except ():", which catches NOTHING
            # (empty exception tuple), so FTP errors propagated despite the
            # clearly intended best-effort swallow here.
            pass
        return newest_dir
import gzip as gz
import shutil as sl
import time as tm

# working directory that receives the extracted GSOD data
ddir = '/home/pi/z_extract/GSOD_daily'
os.chdir(ddir)

# open the connection and log in anonymously
ftp = FTP('ftp.ncdc.noaa.gov')
print('Logging in...')
ftp.login('ftp', '*****@*****.**')

# move into the 2018 daily-summaries directory
print('Changing directory...')
ftp.cwd('/pub/data/gsod/2018')

# show what is available on the server
print('Retrieving filenames...')
ftp.retrlines('LIST')

# fetch the yearly tar archive into the current directory
print('Downloading gsod_2018.tar file...')
with open(os.curdir + '/gsod_2018.tar', 'wb') as tar_out:
    ftp.retrbinary('RETR gsod_2018.tar', tar_out.write)

# close the connection
ftp.close()
# download complete
########################################################### ## Start of Processing ########################################################### sday = time.strftime("%a") sdelta = thedelta(sday) thedate = (monday(sdelta)) isodate = thedate.strftime("%Y" + "%m" + "%d") ## Full date for use in maps thetextdate = thedate.strftime("%B %d, %Y") res = 300 ## Set up the FTP session ftp = FTP("128.183.163.40") ftp.login() ftp.cwd('/pub/DM/RT_run/') filedate = (((ftp.sendcmd('MDTM ' + 'sfsm_perc_0125deg_US.bin')).split(" "))[1])[0:8] ## Check to see if the file is the correct date based on the timestamp print "Checking file date..." while isodate > filedate: # first while loop code print "The file date " + filedate + " is older than this weeks pull date " + isodate print "Waiting 30 minutes to retry the retrieval..." time.sleep(1800) filedate = (((ftp.sendcmd('MDTM ' + 'sfsm_perc_0125deg_US.bin')).split(" "))[1])[0:8] print "File date " + filedate + " correct, proceeding with process." ## Operational Directories ##webarchive = "\\\\seca\\e\\web_archive\\NASA\\GRACE\\" ##webcurrent = "\\\\sucho\\E\\Web\\Drought\\nasa_grace\\" ##nasapub = "\\\\seca\\e\\web_archive\\NASA\\GRACE\\NASApublication\\"
def _connect_to_server(self):
    """Open, authenticate, and position a fresh FTP session.

    Connects to ``self.host``/``self.port``, logs in with the stored
    credentials, changes into ``self.directory``, and hands the ready
    session back to the caller.
    """
    session = FTP()
    session.connect(self.host, self.port)
    session.login(self.login, self.password)
    session.cwd(self.directory)
    return session
#------------------------------------------------------------------------------
# Connect to the NCBI ftp site and write the refseq_rna db file URLs to disk.
try:
    ncbi = 'ftp.ncbi.nlm.nih.gov/'
    blastdb = '/blast/db/'  # Set variable for the blastdb subdirectory
    ftp = FTP("ftp.ncbi.nlm.nih.gov", timeout=None)
    # Login using email as password (NCBI anonymous-FTP convention).
    ftp.login(user='******', passwd='*****@*****.**')
    log.info("Successful FTP login.")
# FIX: error_perm alone never catches connection failures (they raise
# socket-level OSError), so the "connection issue" handling below was dead.
except (error_perm, OSError):
    log.info("FTP connection error.")
    sys.exit()

# Change to the desired directory
ftp.cwd(blastdb)
# Use ftp.pwd() to find out the current directory
# Use ftp.retrlines('LIST') to get a list of all the files in the directory

# This is a list of the file names in the current directory
filenames = ftp.nlst()

#------------------------------------------------------------------------------
# Write the list/text file of the wanted refseq_rna files.
with open('downloadlist.txt', 'w') as downloads:
    for filename in filenames:
        if fnmatch.fnmatch(filename, 'refseq_rna*'):  # Get only those files.
            # Write the url of each refseq_rna db file to a text file.
            # FIX: use write() — writelines() on a plain string iterates it
            # character by character; write() emits the line in one call.
            downloads.write(ncbi + blastdb + filename + '\n')
            # use elif here to get the taxdb.tar.gz file.
#!/usr/bin/python3
import json
from ftplib import FTP

# Load the FTP credentials from the JSON config next to this script.
with open('c2.json') as json_credentials_file:
    credentials = json.load(json_credentials_file)
remotehost = credentials['FTP']['FTP_SERVER']
username = credentials['FTP']['FTP_USER']
password = credentials['FTP']['FTP_PASSWD']
remotedir = credentials['FTP']['FTP_REMOTE_DIR']

# Echo the connection parameters for debugging.
# FIX: never print the password in clear text.
print(remotehost)
print(username)
print(remotedir)

# Establish a connection and list the "rpis" directory contents.
ftp = FTP(host=remotehost, user=username, passwd=password)
ftp.cwd(remotedir)
# FIX: FTP.dir() prints the listing itself and returns None, so wrapping
# it in print() only added a stray "None" line.
ftp.dir()
ftp.quit()
# Write this host's name/IP into a small report file and upload it via FTP.
testname = "b670"
myfile = testname + ".txt"
myhost = '10.71.32.138'
mydir = '/home/kei/MalvinCui/testerip'
myname = socket.getfqdn(socket.gethostname())
#print(myname)
myhname = myname.split('.')[0]
#print(myhname)
myaddr = socket.gethostbyname(myhname)
#print(myaddr)
myuser = '******'
mypasswd = 'keiuser'

# Write the report locally; context manager guarantees the handle is closed
# before the file is re-opened for upload.
with open(myfile, "w+") as f:
    li = [
        "It's come from " + testname + "\n",
        "ip=" + myaddr + "\n",
        "dnsname=" + myname + "\n"
    ]
    f.writelines(li)

ftp = FTP()
timeout = 30
port = 21
ftp.connect(myhost, port, timeout)  # connect to the FTP server
ftp.login(myuser, mypasswd)         # log in
ftp.cwd(mydir)                      # change to the target FTP path
#ftp.delete(myfile)
# FIX: the original passed an anonymous open() handle to storbinary and
# never closed it; use a context manager so the descriptor is released.
with open(myfile, 'rb') as payload:
    ftp.storbinary('STOR %s' % myfile, payload)  # upload the report
ftp.quit()                          # leave the FTP server
"\nPlease enter your MySQL password:"******"\nRetrieving file listings from FTP...\n" # connect to ftp ensembl_ftp = FTP(ftp_host) # open connection ensembl_ftp.login(ftp_user, ftp_pass) # login ensembl_ftp.cwd(ftp_path) # change to MySQL path # retrieve directories ftp_struct = {} # setup dict to store data dirs = ensembl_ftp.nlst() for dir in dirs: # match only core + ensembl ancestral, compara and ontology dbs if need be # remove if and adjust whitespace to retrieve everything if re.match("[a-z]+_[a-z]+_core_[0-9]{2}_\w+", dir) or re.match( "(^ensembl_[agco][a-z]{2,8}_[0-9]{2}$)", dir): # if re.match("^[a-z]+_[a-z]+_[a-z]+_[0-9]{2}_\w+$", dir) or re.match("(^ensembl_[aco][a-z]{6,8}_[0-9]{2}$)", dir): print dir new_path = os.path.join(ftp_path, dir) # build new path ensembl_ftp.cwd(new_path) # change to new path files = ensembl_ftp.nlst() # retrieve file listing ftp_struct[dir] = files
print("Counting files...") total_files = sum([len(files) for r, d, files in os.walk(temp_folder)]) total_files += sum([len(d) for r, d, files in os.walk(temp_folder)]) # directory size print("Size...") total_size = 0 for path, dirs, files in os.walk(temp_folder): for f in files: fp = os.path.join(path, f) total_size += os.path.getsize(fp) # upload over ftp print("Starting FTP upload...") myFTP = FTP(xbox_ip, xbox_user, xbox_password) myFTP.cwd(xbox_path) # cd to xbox_path on the xbox filebar = tqdm() pbar = tqdm(total=total_files) # set up progress bar pbar.set_description("Total progress (files)") pbarSize = tqdm(total=total_size) pbarSize.set_description("Total progress (size)") uploadThis(temp_folder) filebar.close() pbar.close() pbarSize.close() print("Done with FTP upload") # remove temp folder
class EFEXMLProcessor(BaseProcessor):
    """Feed processor that pulls EFE NewsML XML files over FTP, parses each
    item, and stores the results as entries via the Django ORM.

    Workflow (see process()): connect -> NLST the feed's root folder ->
    for each XML file: download via FTP URL, parse, categorize, create entry.
    Python 2 code (``unicode``, ``urllib.urlretrieve``).
    """

    def connect(self):
        # Open the control connection described by the feed's source fields
        # and log in only when credentials are configured.
        self.ftp = FTP()
        params = {"host": self.feed.source_url}
        if self.feed.source_port:
            params['port'] = int(self.feed.source_port)
        self.ftp.connect(**params)
        if self.feed.source_username:
            self.ftp.login(self.feed.source_username, self.feed.source_password)
        self.verbose_print(self.ftp.getwelcome())
        return self.ftp

    def get_temp_file(self):
        # Scratch file for one downloaded XML document; deleted on close.
        f = NamedTemporaryFile(delete=True)
        self.verbose_print("%s tempfile created" % f.name)
        return f

    def process_file(self, s):
        # Callback for retrlines('NLST', ...): handle a single remote
        # filename. Skips already-logged and non-XML names, downloads the
        # file through an ftp:// URL, then parses and stores its items.
        self.verbose_print("-" * 78)
        if self.log_created(s):
            self.verbose_print("%s already processed, skipping." % s)
            return
        s = s.strip()
        s = s.replace("\n", "")
        ext = s.split('.')[-1]
        if ext not in ['XML', 'xml']:
            self.verbose_print("Skipping non xml %s" % s)
            return
        self.verbose_print("Retrieving file %s" % s)
        source_root_folder = self.feed.source_root_folder
        if not source_root_folder.endswith('/'):
            source_root_folder += "/"
        # Embed credentials in the URL when the feed has them.
        if self.feed.source_username:
            url = "ftp://{0}:{1}@{2}{3}{4}".format(self.feed.source_username,
                                                   self.feed.source_password,
                                                   self.feed.source_url,
                                                   source_root_folder, s)
        else:
            url = "ftp://{0}{1}{2}".format(self.feed.source_url,
                                           source_root_folder, s)
        self.verbose_print(url)
        f = self.get_temp_file()
        try:
            urllib.urlretrieve(url, filename=f.name)
            self.verbose_print("File retrieved successfully")
        except Exception as e:
            self.verbose_print("error urlretrieve")
            self.verbose_print(str(e))
            return
        try:
            # Sanity read: bail out early on an empty download.
            xml_string = f.read()
            self.verbose_print("xml_string read!")
        except Exception as e:
            self.verbose_print("error f.read")
            self.verbose_print(str(e))
            return
        if not xml_string:
            self.verbose_print("XML Empty")
            f.close()
            return
        news = self.parse_xml(f.name)
        created = None
        for data in news:
            data = self.categorize(data)
            # self.verbose_print(str(data))
            created = self.create_entry(data)
        # Mark the file as processed only when the *last* item was created.
        if created:
            self.record_log(s)
        else:
            self.verbose_print("Entries not created")
        f.close()

    def parse_xml(self, filename):
        """Parse a NewsML file into a list of per-item dicts.

        Each try/except block pulls one optional field; missing XML nodes
        are tolerated silently. Items without both body and headline are
        dropped. Returns None when the document does not parse at all.
        """
        news = []
        try:
            tree = ET.parse(filename)
            root = tree.getroot()
        except:
            return
        for item in root.findall('./NewsItem'):
            data = {}
            try:
                data['headline'] = item.find(
                    './NewsComponent/NewsLines/HeadLine').text
                data['subheadline'] = item.find(
                    './NewsComponent/NewsLines/SubHeadLine').text
            except:
                pass
            try:
                # IPTC subject classification attributes.
                tobject_attrib = item.find(
                    './NewsComponent/ContentItem/'
                    'DataContent/nitf/head/tobject/tobject.subject')
                data['iptc_code'] = tobject_attrib.get(
                    'tobject.subject.refnum')
                data['iptc_matter'] = tobject_attrib.get(
                    'tobject.subject.matter')
                data['iptc_type'] = tobject_attrib.get('tobject.subject.type')
            except:
                pass
            try:
                # Keywords: one space-separated "key" attribute, lowercased.
                tags_attr = item.find(
                    './NewsComponent/ContentItem/'
                    'DataContent/nitf/head/docdata/key-list/keyword')
                tags = tags_attr.get('key')
                data['tags'] = [tag.lower() for tag in tags.split()]
                self.verbose_print(data.get('tags'))
            except Exception as e:
                self.verbose_print("error tog et tags %s" % str(e))
                pass
            try:
                pub_data_attrib = item.find('./NewsComponent/ContentItem/'
                                            'DataContent/nitf/head/pubdata')
                data['pub_date'] = pub_data_attrib.get('date.publication')
                data['item_len'] = pub_data_attrib.get('item-length')
            except:
                pass
            try:
                data['abstract'] = item.find(
                    './NewsComponent/ContentItem'
                    '/DataContent/nitf/body/body.head/abstract/').text
            except:
                pass
            try:
                data['owner'] = item.find(
                    './NewsComponent/ContentItem/DataContent/nitf/'
                    'body/body.head/rights/').text
            except:
                pass
            try:
                data['story_data'] = item.find(
                    './NewsComponent/ContentItem/DataContent/nitf/'
                    'body/body.head/dateline/story.date').get('norm')
            except:
                pass
            try:
                # Wrap each body paragraph in <p> tags.
                body = item.find(
                    './NewsComponent/ContentItem/DataContent/nitf/'
                    'body/body.content')
                data['body'] = u"\n".join(u"<p>{0}</p>".format(p.text)
                                          for p in body)
            except:
                pass
            if not all([data.get('body'), data.get('headline')]):
                self.verbose_print("Data does not have body and headline %s"
                                   % str(data))
            else:
                news.append(data)
        return news

    def parse_dt(self, s):
        # Parse a publication timestamp: dateutil-style parse (shifted by
        # TZ_DELTA) first, falling back to a bare YYYYMMDD prefix.
        # Returns None when neither form parses.
        self.verbose_print("Received to parse_dt %s" % s)
        try:
            try:
                new_s = parse(s) - TZ_DELTA
            except:
                new_s = datetime.strptime(s[:8], "%Y%m%d")
            self.verbose_print("parsed to %s" % new_s)
            return new_s
        except Exception as e:
            self.verbose_print("Cannot parse dt")
            self.verbose_print(str(e))
            return

    def create_entry(self, data):
        """Persist one parsed item as an entry; returns its pk, or None on
        null data, duplicate slug, or any ORM failure."""
        if not data:
            self.verbose_print("data is null")
            return
        pub_time = self.parse_dt(
            data.get('pub_date', data.get('story_data', None)))
        if pub_time:
            pub_time_str = pub_time.strftime("%Y-%m-%d")
        else:
            pub_time_str = ""
        # working
        entry_title = unicode(data.get('headline', ''))
        # slug generated as
        # feed-name-news-title-2013-01-01
        slug = slugify(self.feed.slug + "-" + entry_title[:100] + pub_time_str)
        exists = self.entry_model.objects.filter(slug=slug).exists()
        if exists:
            #slug = str(random.getrandbits(8)) + "-" + slug
            self.verbose_print("Entry slug exists, skipping")
            return
        try:
            tags = ",".join(data.get('tags'))
        except:
            # data.get('tags') may be None/absent; store NULL-ish tags.
            tags = None
        self.verbose_print(tags)
        try:
            db_entry, created = self.entry_model.objects.get_or_create(
                entry_feed=self.feed,
                channel=self.feed.get_channel(),
                title=entry_title[:150],
                slug=slug[:150],
                entry_title=entry_title[:150],
                site=self.feed.site,
                user=self.feed.user,
                published=self.feed.publish_entries,
                show_on_root_channel=False,
                tags=unicode(tags))
            db_entry.entry_description = unicode(data.get('abstract', ''))
            db_entry.entry_content = unicode(data.get('body', ''))
            db_entry.entry_category = unicode(data.get('iptc_matter', ''))
            db_entry.hat = unicode(data.get('subheadline', ''))
            db_entry.entry_category_code = unicode(data.get('iptc_code', ''))
            db_entry.entry_published_time = pub_time
            try:
                # Keep the raw parsed payload for debugging/auditing.
                db_entry.entry_json = json.dumps(data)
            except Exception as e:
                self.verbose_print("Cound not dump json %s" % str(data))
                self.verbose_print(str(e))
            db_entry.save()
            self.verbose_print("Entry saved: %s" % db_entry.pk)
            db_entry.pub_time_str = pub_time_str
            self.run_hooks(db_entry)
            return db_entry.pk
        except Exception as e:
            self.verbose_print("Cannot save the entry")
            self.verbose_print(str(data))
            self.verbose_print(str(e))

    def categorize(self, data):
        # Enrich the item with the iptc lookup table; when the code is
        # unknown, fall back to the raw IPTC attributes from the XML.
        if not data.get('iptc_code'):
            self.verbose_print("No iptc code to categorize")
            return data
        iptc_info = iptc.get(data['iptc_code'])
        if iptc_info:
            data.update(iptc_info)
        else:
            data['parent_desc'] = data.get('iptc_type')
            data['desc'] = data.get('iptc_matter')
            data['cod'] = data['iptc_code']
            data['parent'] = None
            data['cat'] = None
        return data

    def process(self):
        # Entry point: list the feed's root folder and feed each remote
        # filename through process_file(), then stamp the poll time.
        self.connect()
        self.ftp.cwd(self.feed.source_root_folder)
        self.verbose_print("Root folder changed to: %s" %
                           self.feed.source_root_folder)
        self.count = 0
        self.ftp.retrlines('NLST', self.process_file)
        self.feed.last_polled_time = datetime.now()
        self.feed.save()

    def hook_not_found(self, *args, **kwargs):
        # Fallback used by run_hooks when a configured hook name is missing.
        self.verbose_print("Hook not found")

    def run_hooks(self, entry):
        # Run each post-create hook by name; a failing hook never aborts
        # the batch, it is only logged.
        hooks = getattr(self, 'hooks', [])
        for hook in hooks:
            try:
                getattr(self, hook, self.hook_not_found)(entry)
            except Exception as e:
                self.verbose_print(str(e))
# Fragment (Python 2): rename one "cor_*" product file into the catalog
# naming scheme and push it to the catalog FTP server.
# NOTE(review): `file`, `debug`, `category`, `platform`, ftpCatalogServer,
# ftpCatalogUser and catalogDestDir come from the enclosing (out-of-view)
# loop/configuration.
if file.startswith('cor_'):
    if debug:
        print >> sys.stderr, "file = ", file
    # Filename layout (underscore-separated):
    #   parts[2]=product id, parts[3]=date-time, parts[4]=product qualifier,
    #   parts[5]=angle (possibly with a decimal part).
    (filename, file_ext) = os.path.splitext(file)
    parts = filename.split('_')
    (date, time) = parts[3].split('-')
    angle_parts = parts[5].split('.')
    # Zero-pad the integer part of the angle to three digits.
    if len(angle_parts[0]) == 1:
        angle = '00' + angle_parts[0]
    elif len(angle_parts[0]) == 2:
        angle = '0' + angle_parts[0]
    else:
        angle = angle_parts[0]
    product = parts[2] + '_' + parts[4] + '_' + angle
    # Catalog name: category.platform.DATETIME.product.ext
    file_cat = category + '.' + platform + '.' + date + time + '.' + product + file_ext
    if debug:
        print >> sys.stderr, "file_cat = ", file_cat
    # Rename on disk via the shell (mv), keeping the original behaviour.
    cmd = 'mv ' + file + ' ' + file_cat
    os.system(cmd)
    # ftp file: best-effort upload; failures are reported but not fatal.
    try:
        catalogFTP = FTP(ftpCatalogServer, ftpCatalogUser)
        catalogFTP.cwd(catalogDestDir)
        file = open(file_cat, 'rb')
        catalogFTP.storbinary('STOR ' + file_cat, file)
        file.close()
        catalogFTP.quit()
    except Exception as e:
        print >> sys.stderr, "FTP failed, exception: ", e
from ftplib import FTP

# Simple localhost FTP example with one download and one upload helper.
ftp = FTP('localhost')  #,'anonymous')
ftp.login(user='******', passwd='******')
ftp.cwd('/home/javier/')
# FIX: FTP.dir() prints the listing itself and returns None — the original
# `files = ftp.dir(); print(files)` just printed "None".
ftp.dir()


def grabFile():
    """Download fileName.txt from the current remote directory."""
    fileName = 'fileName.txt'
    # FIX: close the local file before (not after) tearing down the session,
    # and guarantee the close with a context manager.
    with open(fileName, 'wb') as localfile:
        ftp.retrbinary('RETR ' + fileName, localfile.write, 1024)
    ftp.quit()


def placeFile():
    """Upload fileName.txt to the current remote directory."""
    fileName = 'fileName.txt'
    # FIX: storbinary needs a binary-mode handle ('rb'); the original opened
    # the file in text mode and never closed it.
    with open(fileName, 'rb') as localfile:
        ftp.storbinary('STOR ' + fileName, localfile)
    ftp.quit()

# ftp.cwd('debian')               # change into "debian" directory
# ftp.retrlines('LIST')           # list directory contents
# ftp.retrbinary('RETR README', open('README', 'wb').write)
ftp.quit()
from ftplib import FTP

# Minimal anonymous walk of the Debian mirror: connect on the default port,
# log in as anonymous, step into "debian", echo the listing, then hang up.
ftp = FTP('ftp.debian.org')
ftp.login()
ftp.cwd('debian')
ftp.retrlines('LIST')
ftp.quit()
def parsePfamPDBs(query, data=None, **kwargs):
    """Returns a list of :class:`.AtomGroup` objects containing sections of chains
    that correspond to a particular PFAM domain family. These are defined by
    alignment start and end residue numbers.

    :arg query: UniProt ID or PDB ID
        If a PDB ID is provided the corresponding UniProt ID is used.
        If this returns multiple matches then start or end must also be provided.
        This query is also used for label refinement of the Pfam domain MSA.
    :type query: str

    :arg data: If given the data list from the Pfam mapping table will
        be output through this argument.
    :type data: list

    :keyword start: Residue number for defining the start of the domain.
        The PFAM domain that starts closest to this will be selected.
        Default is **1**
    :type start: int

    :keyword end: Residue number for defining the end of the domain.
        The PFAM domain that ends closest to this will be selected.
    :type end: int
    """
    # FIX: the original used a mutable default (data=[]), so omitted-arg calls
    # silently accumulated rows in one shared list across invocations.
    start = kwargs.pop('start', 1)
    end = kwargs.pop('end', None)

    # Resolve the Pfam accession: accept it directly, or pick the family
    # whose alignment start (or end) is closest to the requested residue.
    if len(query) > 4 and query.startswith('PF'):
        pfam_acc = query
    else:
        pfam_matches = searchPfam(query)
        keys = list(pfam_matches.keys())

        if isinstance(start, Integral):
            start_diff = []
            for i, key in enumerate(pfam_matches):
                start_diff.append(int(pfam_matches[key]['locations'][0]['start']) - start)
            start_diff = np.array(start_diff)
            pfam_acc = keys[np.where(abs(start_diff) == min(abs(start_diff)))[0][0]]
        elif isinstance(end, Integral):
            end_diff = []
            for i, key in enumerate(pfam_matches):
                end_diff.append(int(pfam_matches[key]['locations'][0]['end']) - end)
            end_diff = np.array(end_diff)
            pfam_acc = keys[np.where(abs(end_diff) == min(abs(end_diff)))[0][0]]
        else:
            raise ValueError('Please provide an integer for start or end '
                             'when using a UniProt ID or PDB ID.')

    from ftplib import FTP
    from .uniprot import queryUniprot

    # Fetch the Pfam->PDB mapping table (pdbmap.gz) from the EBI FTP site.
    data_stream = BytesIO()
    ftp_host = 'ftp.ebi.ac.uk'
    ftp = FTP(ftp_host)
    ftp.login()
    ftp.cwd('pub/databases/Pfam/current_release')
    ftp.retrbinary('RETR pdbmap.gz', data_stream.write)
    ftp.quit()
    zip_data = data_stream.getvalue()
    data_stream.close()

    rawdata = gunzip(zip_data)
    if PY3K:
        rawdata = rawdata.decode()

    # Collect the tab-separated pdbmap rows that mention our accession.
    fields = ['PDB_ID', 'chain', 'nothing', 'PFAM_Name', 'PFAM_ACC',
              'UniprotAcc', 'UniprotResnumRange']
    data_dicts = []
    for line in rawdata.split('\n'):
        if line.find(pfam_acc) != -1:
            data_dicts.append({})
            for j, entry in enumerate(line.strip().split('\t')):
                data_dicts[-1][fields[j]] = entry.strip(';')

    pdb_ids = [data_dict['PDB_ID'] for data_dict in data_dicts]
    chains = [data_dict['chain'] for data_dict in data_dicts]

    header = kwargs.pop('header', False)
    model = kwargs.get('model', None)
    results = parsePDB(*pdb_ids, chain=chains, header=True, **kwargs)

    ags, headers = results
    ags, headers = list(ags), list(headers)

    if model == 0:
        LOGGER.info('only header is requested and returned')
        return results

    if header:
        results = (ags, headers)
    else:
        results = ags

    LOGGER.progress('Extracting Pfam domains...', len(ags))
    comma_splitter = re.compile(r'\s*,\s*').split
    no_info = []
    for i, ag in enumerate(ags):
        LOGGER.update(i)
        data_dict = data_dicts[i]
        pfamRange = data_dict['UniprotResnumRange'].split('-')
        uniprotAcc = data_dict['UniprotAcc']
        try:
            uniData = queryUniprot(uniprotAcc)
        except:
            # FIX: was data_dict['PBD_ID'] — a typo that raised KeyError
            # inside this warn path instead of logging.
            LOGGER.warn('No Uniprot record found for {0}'.format(data_dict['PDB_ID']))
            continue
        # Find the dbReference entry matching this PDB chain to recover the
        # UniProt residue range covered by the structure.
        resrange = None
        found = False
        for key, value in uniData.items():
            if not key.startswith('dbReference'):
                continue
            try:
                pdbid = value['PDB']
            except:
                continue
            if pdbid != data_dict['PDB_ID']:
                continue
            pdbchains = value['chains']
            # example chain strings: "A=27-139, B=140-150" or "A/B=27-150"
            pdbchains = comma_splitter(pdbchains)
            for chain in pdbchains:
                chids, resrange = chain.split('=')
                chids = [chid.strip() for chid in chids.split('/')]
                if data_dict['chain'] in chids:
                    resrange = resrange.split('-')
                    found = True
                    break
            if found:
                break

        if found:
            # FIX: the original reassigned `header` (the boolean kwarg) to a
            # header dict here, corrupting the `if header:` check in the
            # cleanup loop below; use a separate local instead.
            pdb_header = headers[i]
            chain_accessions = [dbref.accession
                                for dbref in pdb_header[data_dict['chain']].dbrefs]
            try:
                if len(chain_accessions) > 0:
                    right_part = np.where(np.array(chain_accessions) ==
                                          data_dict['UniprotAcc'])[0][0]
                else:
                    raise ValueError('There is no accession for a chain in the Header')
            except:
                LOGGER.warn('Could not map domains in {0}'
                            .format(data_dict['PDB_ID'] + data_dict['chain']))
                no_info.append(i)
                continue

            right_dbref = pdb_header[data_dict['chain']].dbrefs[right_part]
            chainStart = ag.select('chain {0}'.format(data_dict['chain'])
                                   ).getResnums()[0]
            missing = chainStart - right_dbref.first[0]
            partStart = ag.getResindices()[np.where(ag.getResnums() ==
                                                    right_dbref.first[0] + missing)][0]
            pfStart, pfEnd = int(pfamRange[0]), int(pfamRange[1])
            uniStart, uniEnd = int(resrange[0]), int(resrange[1])

            # Translate the Pfam (UniProt-numbered) range into residue
            # indices of this AtomGroup and slice the domain out.
            resiStart = pfStart - uniStart + partStart - missing
            resiEnd = pfEnd - uniStart + partStart - missing
            ags[i] = ag.select('resindex {0} to {1}'.format(
                resiStart, resiEnd))
        else:
            no_info.append(i)
    LOGGER.finish()

    # Drop structures that could not be mapped (reverse order keeps the
    # remaining indices valid while popping).
    for i in reversed(no_info):
        ags.pop(i)
        if header:
            headers.pop(i)

    if isinstance(data, list):
        data.extend(data_dicts)
    elif data is not None:
        LOGGER.warn('data should be a list in order to get output')

    return results
# NOTE(review): the lines down to `return info[GameID]` are the tail of an
# out-of-view function that fills info[GameID] from a title's param.sfo —
# the `return` is not valid at module level as transcribed here.
ftp.cwd('/system_data/priv/appmeta/%s/' % GameID)
ftp.retrbinary("RETR param.sfo" , buffer.write)  # read param.sfo into the in-memory buffer
buffer.seek(0)
sfo = SfoFile.from_reader(buffer)  # SfoFile is a project helper (defined elsewhere)
info[GameID].sfo = sfo
info[GameID].size = ftp.size("/user/app/%s/app.pkg" % GameID)  # installed pkg size in bytes
info[GameID].is_usable = True
return info[GameID]

# Script body: connect to the console's FTP server, list installed apps,
# pull the system app database and enumerate its appbrowse tables.
ftp = FTP()
ftp.connect(PS4_IP, 21, timeout=30)
ftp.login(user='******', passwd = 'password')
if(len(files) == 0) :
    # `files` and sort_files() come from earlier in the original script;
    # dir() feeds each listing line through the sort_files callback.
    ftp.cwd('/user/app/')
    ftp.dir(sort_files)
print(files)
# Download the system application database for local inspection.
ftp.cwd('/system_data/priv/mms/')
lf = open(app_db, "wb")
ftp.retrbinary("RETR app.db" , lf.write)
lf.close()
# Enumerate the per-user app-browser tables inside the SQLite db.
conn = sqlite3.connect(app_db)
cursor = conn.cursor()
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name LIKE 'tbl_appbrowse_%%';")
tables = cursor.fetchall()
# Fragment (Python 2): walk a tab-separated organism/FTP-link table and
# download + formatdb each genomic fna from NCBI.
# NOTE(review): the leading unpack reads like the body of a method whose
# `def` line is missing from this excerpt, and the snippet is truncated at
# the trailing `else:` — both need the upstream source to reconstruct.
self, inFname, destFolder = argv
ftp = FTP('ftp.ncbi.nlm.nih.gov')
ftp.login()
f = open(argv[1], 'r')
blastDBListF = open("%s.dbl" % path.basename(argv[1]), 'w')
for l in f:
    fnaCount = 0
    # Input row layout: organism-name <tab> ftp-link
    link = l.split('\t')[1]
    org = l.split('\t')[0]
    if link in ['no_genome', 'na']:
        continue
    # Strip the host so the remainder can be used as a cwd path.
    directory = link.replace('ftp://ftp.ncbi.nlm.nih.gov', '')
    ftp.cwd(directory)
    #ftp.retrlines('LIST')
    files = ftp.nlst()
    for m in files:
        if 'genomic.fna.gz' in m:
            fileLink = "ftp://ftp.ncbi.nlm.nih.gov%s/%s" % (directory, m)
            fnaCount += 1
            print "downloading %s for %s" % (fileLink, org)
            # Sanitize the organism name into a filesystem-safe filename.
            savedName = "%s/%s_genomic.fna.gz" % (
                destFolder.rstrip(),
                org.replace(' ', '_').replace('/', '_').replace(
                    '(', '_').replace(')', '_'))
            # Skip work when the formatdb index (.nin) already exists.
            if not path.isfile(savedName.replace('.gz', '.nin')):
                urllib.urlretrieve(fileLink, savedName)
                system('gunzip %s' % savedName)
                system('formatdb -i %s -p F -o T' % savedName.replace('.gz', ''))
            else:
                # (snippet truncated here in this excerpt)
def publish_image(self, cr, uid, ids, data, context=None):
    """Publish an uploaded wizard file to the configured wiki FTP server.

    Writes the base64-decoded file to a local tmp dir, uploads it over FTP,
    creates a wiki.media record per active id, and stores the wiki markup
    (``img:url`` for images, ``[url]`` otherwise) in this wizard's result.
    OpenERP 6-era API (osv/cr/uid); Python 2 (``unicode``, ``file()``,
    ``base64.decodestring``).
    """
    if context is None:
        context = {}
    form = self.browse(cr, uid, ids[0])
    # Exactly one active wiki.files.conf record supplies the FTP settings.
    wiki_files_conf_id = self.pool.get('wiki.files.conf').search(
        cr, uid, [('active', '=', 1)])
    if not wiki_files_conf_id:
        raise osv.except_osv(_('Error'), _("Configure your Wiki Files!"))
    wiki_conf = self.pool.get('wiki.files.conf').browse(
        cr, uid, wiki_files_conf_id[0])
    images = ['jpg', 'gif', 'png']  # extensions rendered with img: markup
    file_name = form.filename.split('.')
    if len(file_name) == 0:
        raise osv.except_osv(_('Error'),
                             _("File name don't have extension."))
    # Slugified basename + lowercased original extension.
    filename = slugify(unicode(file_name[0], 'UTF-8'))
    filename += "." + file_name[1].lower()
    # Local scratch location: <module dir>/tmp/
    path = os.path.abspath(os.path.dirname(__file__))
    path += '/tmp/'
    fileurl = wiki_conf.ftpurl
    if not fileurl[-1] == '/':
        fileurl += '/'
    b64_file = form.file
    full_path = os.path.join(path, filename)
    #copy local server (tmp dir)
    ofile = open(full_path, 'w')
    try:
        ofile.write(base64.decodestring(b64_file))
    finally:
        ofile.close()
    #send ftp server
    ftp = FTP(wiki_conf.ftpip)
    ftp.login(wiki_conf.ftpusername, wiki_conf.ftppassword)
    ftp.cwd(wiki_conf.ftpdirectory)
    f = file(full_path, 'rb')
    ftp.storbinary('STOR ' + os.path.basename(full_path), f)
    ftp.quit()
    #delete local server (tmp dir)
    try:
        os.remove(full_path)
    except:
        raise osv.except_osv(_('Error'),
                             _("File don't remove local server."))
    # One wiki.media record per record the wizard was launched on.
    for data in data['active_ids']:
        values = {
            'file': fileurl + filename,
            'media_id': data,
        }
        self.pool.get('wiki.media').create(cr, uid, values, context)
    # Build the wiki markup the user will paste into pages.
    if filename[-3:] in images:
        result = 'img:%s%s' % (fileurl, filename)
    else:
        result = '[%s%s]' % (fileurl, filename)
    values = {
        'state': 'done',
        'result': result,
    }
    self.write(cr, uid, ids, values)
class FtpClient(QWidget):
    """Two-pane PyQt FTP client: remote server browser on the left wired to
    an ``ftplib.FTP`` session, local filesystem browser on the right.
    Transfers run on plain ``threading.Thread`` workers with per-transfer
    progress bars. Supports both PyQt4 (``toUtf8``) and PyQt5 string APIs.
    """

    def __init__(self, parent=None):
        super(FtpClient, self).__init__(parent)
        self.ftp = FTP( )
        self.setupGui( )
        self.downloads=[ ]  # transfer bookkeeping (not used in visible code)
        # Remote-pane signals.
        self.remote.homeButton.clicked.connect(self.cdToRemoteHomeDirectory)
        self.remote.fileList.itemDoubleClicked.connect(self.cdToRemoteDirectory)
        self.remote.fileList.itemClicked.connect(lambda: self.remote.downloadButton.setEnabled(True))
        self.remote.backButton.clicked.connect(self.cdToRemoteBackDirectory)
        self.remote.nextButton.clicked.connect(self.cdToRemoteNextDirectory)
        # Downloads/uploads run off the GUI thread.
        self.remote.downloadButton.clicked.connect(lambda: Thread(target=self.download).start())
        # Local-pane signals.
        self.local.homeButton.clicked.connect(self.cdToLocalHomeDirectory)
        self.local.fileList.itemDoubleClicked.connect(self.cdToLocalDirectory)
        self.local.fileList.itemClicked.connect(lambda: self.local.uploadButton.setEnabled(True))
        self.local.backButton.clicked.connect(self.cdToLocalBackDirectory)
        self.local.nextButton.clicked.connect(self.cdToLocalNextDirectory)
        self.local.uploadButton.clicked.connect(lambda: Thread(target=self.upload).start())
        self.local.connectButton.clicked.connect(self.connect)
        self.progressDialog = ProgressDialog(self)

    def setupGui(self):
        # Side-by-side panes: remote browser left, local browser right.
        self.resize(1200, 650)
        self.local = LocalGuiWidget(self)
        self.remote = RemoteGuiWidget(self)
        mainLayout = QtWidgets.QHBoxLayout( )
        mainLayout.addWidget(self.remote)
        mainLayout.addWidget(self.local)
        mainLayout.setSpacing(0)
        mainLayout.setContentsMargins(5,5,5,5)
        self.setLayout(mainLayout)

    def initialize(self):
        # Called after a successful login: seed both browse histories with
        # the starting directories and populate both panes.
        self.localBrowseRec = [ ]
        self.remoteBrowseRec = [ ]
        self.pwd = self.ftp.pwd( )
        self.local_pwd = os.getenv('HOME')
        self.remoteOriginPath = self.pwd
        self.localOriginPath = self.local_pwd
        self.localBrowseRec.append(self.local_pwd)
        self.remoteBrowseRec.append(self.pwd)
        self.downloadToRemoteFileList( )
        self.loadToLocaFileList( )

    def disconnect(self):
        # Not implemented; the session is simply dropped on exit.
        pass

    def connect(self):
        # Ask the user for a host, accept either a bare hostname or a full
        # URL (hostname extracted via urlparse), then prompt for login.
        try:
            from urlparse import urlparse       # Python 2
        except ImportError:
            from urllib.parse import urlparse   # Python 3
        result = QtWidgets.QInputDialog.getText(self,
            'Connect To Host', 'Host Address', QtWidgets.QLineEdit.Normal)
        if not result[1]:
            return
        # PyQt4 returns QString (needs toUtf8); PyQt5 returns str.
        try:
            host = str(result[0].toUtf8())
        except AttributeError:
            host = str(result[0])
        try:
            if urlparse(host).hostname:
                self.ftp.connect(host=urlparse(host).hostname, port=21, timeout=10)
            else:
                self.ftp.connect(host=host, port=21, timeout=10)
            self.login()
        except Exception as error:
            raise error

    def login(self):
        # Credentials are cached on the FTP object so transfer worker
        # threads can open their own sessions (see download/upload).
        ask = loginDialog(self)
        if not ask:
            return
        else:
            user, passwd = ask[:2]
            self.ftp.user = user
            self.ftp.passwd = passwd
            self.ftp.login(user=user, passwd=passwd)
            self.initialize( )

    '''
    def connect(self, address, port=21, timeout=10):
        from urlparse import urlparse
        if urlparse(address).hostname:
            self.ftp.connect(urlparse(address).hostname, port, timeout)
        else:
            self.ftp.connect(address, port, timeout)

    def login(self, name=None, passwd=None):
        if not name:
            self.ftp.login( )
        else:
            self.ftp.login(name, passwd)
            self.ftp.user, self.ftp.passwd = (user, passwd)
        self.initialize( )
    '''

    #---------------------------------------------------------------------------------#
    ## the downloadToRemoteFileList with loadToLocalFileList is doing the same thing ##
    #---------------------------------------------------------------------------------#
    def downloadToRemoteFileList(self):
        """ download file and directory list from FTP Server """
        self.remoteWordList = [ ]
        self.remoteDir = { }  # remote pathname -> True when it is a directory
        self.ftp.dir('.', self.addItemToRemoteFileList)
        self.remote.completerModel.setStringList(self.remoteWordList)

    def loadToLocaFileList(self):
        """ load file and directory list from local computer """
        self.localWordList = [ ]
        self.localDir = { }  # local pathname -> True when it is a directory
        for f in os.listdir(self.local_pwd):
            pathname = os.path.join(self.local_pwd, f)
            # fileProperty() formats an ls-style line (defined elsewhere).
            self.addItemToLocalFileList(fileProperty(pathname))
        self.local.completerModel.setStringList(self.localWordList)

    def addItemToRemoteFileList(self, content):
        # Callback for FTP.dir(): one ls-style line per entry.
        mode, num, owner, group, size, date, filename = self.parseFileInfo(content)
        if content.startswith('d'):  # 'd' mode flag => directory
            icon = qIcon('folder.png')
            pathname = os.path.join(self.pwd, filename)
            self.remoteDir[ pathname] = True
            self.remoteWordList.append(filename)
        else:
            icon = qIcon('file.png')
        item = QtWidgets.QTreeWidgetItem( )
        item.setIcon(0, icon)
        for n, i in enumerate((filename, size, owner, group, date, mode)):
            item.setText(n, i)
        self.remote.fileList.addTopLevelItem(item)
        if not self.remote.fileList.currentItem():
            self.remote.fileList.setCurrentItem(self.remote.fileList.topLevelItem(0))
            self.remote.fileList.setEnabled(True)

    def addItemToLocalFileList(self, content):
        # Same as addItemToRemoteFileList but for the local pane.
        mode, num, owner, group, size, date, filename = self.parseFileInfo(content)
        if content.startswith('d'):
            icon = qIcon('folder.png')
            pathname = os.path.join(self.local_pwd, filename)
            self.localDir[ pathname ] = True
            self.localWordList.append(filename)
        else:
            icon = qIcon('file.png')
        item = QtWidgets.QTreeWidgetItem( )
        item.setIcon(0, icon)
        for n, i in enumerate((filename, size, owner, group, date, mode)):
            #print((filename, size, owner, group, date, mode))
            item.setText(n, i)
        self.local.fileList.addTopLevelItem(item)
        if not self.local.fileList.currentItem():
            self.local.fileList.setCurrentItem(self.local.fileList.topLevelItem(0))
            self.local.fileList.setEnabled(True)

    def parseFileInfo(self, file):
        """ parse files information
        "drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib"
        result like follower
        "drwxr-xr-x", "2", "root", "wheel", "1024 Nov 17 1993", "lib"
        """
        item = [f for f in file.split(' ') if f != '']
        mode, num, owner, group, size, date, filename = (
            item[0], item[1], item[2], item[3], item[4],
            ' '.join(item[5:8]), ' '.join(item[8:]))
        return (mode, num, owner, group, size, date, filename)

    #--------------------------#
    ## for remote file system ##
    #--------------------------#
    def cdToRemotePath(self):
        # Navigate to the path typed into the remote path edit box.
        try:
            pathname = str(self.remote.pathEdit.text().toUtf8())
        except AttributeError:
            pathname = str(self.remote.pathEdit.text())
        try:
            self.ftp.cwd(pathname)
        except:
            return
        # py2-style conditional: absolute paths kept, relative ones joined.
        self.cwd = pathname.startswith(os.path.sep) and pathname or os.path.join(self.pwd, pathname)
        self.updateRemoteFileList( )
        self.remote.backButton.setEnabled(True)
        if os.path.abspath(pathname) != self.remoteOriginPath:
            self.remote.homeButton.setEnabled(True)
        else:
            self.remote.homeButton.setEnabled(False)

    def cdToRemoteDirectory(self, item, column):
        # Double-click on a remote entry: descend into it if it is a dir.
        pth = u' '.join(item.text(0)).encode('utf-8').strip()
        pathname = os.path.join(self.pwd, pth)
        if not self.isRemoteDir(pathname):
            return
        self.remoteBrowseRec.append(pathname)
        self.ftp.cwd(pathname)
        self.pwd = self.ftp.pwd( )
        self.updateRemoteFileList( )
        self.remote.backButton.setEnabled(True)
        if pathname != self.remoteOriginPath:
            self.remote.homeButton.setEnabled(True)

    def cdToRemoteBackDirectory(self):
        # Step one entry back in the remote browse history.
        pathname = self.remoteBrowseRec[ self.remoteBrowseRec.index(self.pwd)-1 ]
        if pathname != self.remoteBrowseRec[0]:
            self.remote.backButton.setEnabled(True)
        else:
            self.remote.backButton.setEnabled(False)
        if pathname != self.remoteOriginPath:
            self.remote.homeButton.setEnabled(True)
        else:
            self.remote.homeButton.setEnabled(False)
        self.remote.nextButton.setEnabled(True)
        self.pwd = pathname
        self.ftp.cwd(pathname)
        self.updateRemoteFileList( )

    def cdToRemoteNextDirectory(self):
        # Step one entry forward in the remote browse history.
        pathname = self.remoteBrowseRec[self.remoteBrowseRec.index(self.pwd)+1]
        if pathname != self.remoteBrowseRec[-1]:
            self.remote.nextButton.setEnabled(True)
        else:
            self.remote.nextButton.setEnabled(False)
        self.remote.backButton.setEnabled(True)
        if pathname != self.remoteOriginPath:
            self.remote.homeButton.setEnabled(True)
        else:
            self.remote.homeButton.setEnabled(False)
        self.remote.backButton.setEnabled(True)
        self.pwd = pathname
        self.ftp.cwd(pathname)
        self.updateRemoteFileList( )

    def cdToRemoteHomeDirectory(self):
        # Jump straight back to the login directory.
        self.ftp.cwd(self.remoteOriginPath)
        self.pwd = self.remoteOriginPath
        self.updateRemoteFileList( )
        self.remote.homeButton.setEnabled(False)

    #-------------------------#
    ## for local file system ##
    #-------------------------#
    def cdToLocalPath(self):
        # Navigate to the path typed into the local path edit box.
        try:
            pathname = str(self.local.pathEdit.text( ).toUtf8())
        except AttributeError:
            pathname = str(self.local.pathEdit.text())
        pathname = pathname.endswith(os.path.sep) and pathname or os.path.join(self.local_pwd, pathname)
        if not os.path.exists(pathname) and not os.path.isdir(pathname):
            return
        else:
            self.localBrowseRec.append(pathname)
            self.local_pwd = pathname
        self.updateLocalFileList( )
        self.local.backButton.setEnabled(True)
        print(pathname, self.localOriginPath)
        if os.path.abspath(pathname) != self.localOriginPath:
            self.local.homeButton.setEnabled(True)
        else:
            self.local.homeButton.setEnabled(False)

    def cdToLocalDirectory(self, item, column):
        # Double-click on a local entry: descend into it if it is a dir.
        pathname = os.path.join(self.local_pwd, str(item.text(0)))
        if not self.isLocalDir(pathname):
            return
        self.localBrowseRec.append(pathname)
        self.local_pwd = pathname
        self.updateLocalFileList( )
        self.local.backButton.setEnabled(True)
        if pathname != self.localOriginPath:
            self.local.homeButton.setEnabled(True)

    def cdToLocalBackDirectory(self):
        # Step one entry back in the local browse history.
        pathname = self.localBrowseRec[ self.localBrowseRec.index(self.local_pwd)-1 ]
        if pathname != self.localBrowseRec[0]:
            self.local.backButton.setEnabled(True)
        else:
            self.local.backButton.setEnabled(False)
        if pathname != self.localOriginPath:
            self.local.homeButton.setEnabled(True)
        else:
            self.local.homeButton.setEnabled(False)
        self.local.nextButton.setEnabled(True)
        self.local_pwd = pathname
        self.updateLocalFileList( )

    def cdToLocalNextDirectory(self):
        # Step one entry forward in the local browse history.
        pathname = self.localBrowseRec[self.localBrowseRec.index(self.local_pwd)+1]
        if pathname != self.localBrowseRec[-1]:
            self.local.nextButton.setEnabled(True)
        else:
            self.local.nextButton.setEnabled(False)
        if pathname != self.localOriginPath:
            self.local.homeButton.setEnabled(True)
        else:
            self.local.homeButton.setEnabled(False)
        self.local.backButton.setEnabled(True)
        self.local_pwd = pathname
        self.updateLocalFileList( )

    def cdToLocalHomeDirectory(self):
        self.local_pwd = self.localOriginPath
        self.updateLocalFileList( )
        self.local.homeButton.setEnabled(False)

    def updateLocalFileList(self):
        # Rebuild the local pane from self.local_pwd.
        self.local.fileList.clear( )
        self.loadToLocaFileList( )

    def updateRemoteFileList(self):
        # Rebuild the remote pane from the server's current directory.
        self.remote.fileList.clear( )
        self.downloadToRemoteFileList( )

    def isLocalDir(self, dirname):
        return self.localDir.get(dirname, None)

    def isRemoteDir(self, dirname):
        return self.remoteDir.get(dirname, None)

    def download(self):
        # Worker-thread download of the selected remote file. Opens a
        # SEPARATE FTP session (fp) so the GUI's control connection stays
        # free while the transfer runs.
        global select_item
        item = self.remote.fileList.currentItem( )
        filesize = int(item.text(1))
        try:
            # fix here — not a string (translated from Portuguese)
            #p.agent_info = u' '.join((item.text(0))).encode('utf-8').strip
            src1 = u' '.join(item.text(0)).encode('utf-8').strip()
            srcfile = os.path.join(self.pwd, src1)
            dst1=' '.join(select_item.text(0)).encode('utf-8').strip()
            dstfile = os.path.join(self.local_pwd, dst1)
        except AttributeError:
            # NOTE(review): this fallback reuses src1/dst1 which are unbound
            # when the try body failed before assigning them.
            srcfile = os.path.join(self.pwd, src1)
            dstfile = os.path.join(self.local_pwd, dst1)
        pb = self.progressDialog.addProgress(
            type='download', title=srcfile, size=filesize,
        )
        def callback(data):
            # Advance the progress bar, then append the chunk to disk.
            pb.set_value(data)
            file.write(data)
        file = open(dstfile, 'wb')
        fp = FTP( )
        fp.connect(host=self.ftp.host, port=self.ftp.port, timeout=self.ftp.timeout)
        fp.login(user=self.ftp.user, passwd=self.ftp.passwd)
        fp.retrbinary(cmd='RETR '+srcfile, callback=callback)

    def upload(self):
        # upload: also define the destination path (translated comment)
        # Worker-thread upload; mirrors download() with its own session.
        global select_item
        item = self.local.fileList.currentItem( )
        filesize = int(item.text(1))
        try:
            srcUp1 = u' '.join(item.text(0)).encode('utf-8').strip()
            srcfile = os.path.join(self.local_pwd, srcUp1)
            dsUp1 = u' '.join(select_item.text(0)).encode('utf-8').strip()
            dstfile = os.path.join(self.pwd, dsUp1)
        except AttributeError:
            # NOTE(review): same unbound-fallback caveat as in download().
            srcfile = os.path.join(self.local_pwd, srcUp1)
            dstfile = os.path.join(self.pwd, dsUp1)
        pb = self.progressDialog.addProgress(
            type='upload', title=srcfile, size=filesize,
        )
        file = open(srcfile, 'rb')
        fp = FTP( )
        fp.connect(host=self.ftp.host, port=self.ftp.port, timeout=self.ftp.timeout)
        fp.login(user=self.ftp.user, passwd=self.ftp.passwd)
        fp.storbinary(cmd='STOR '+dstfile, fp=file, callback=pb.set_value)
class smftp:
    """Thin convenience wrapper around ftplib.FTP for mirroring directory trees."""

    # def __init__(self, hostaddr, username, password, remotedir, port=21):
    def __init__(self, hostaddr, username, password, port=21):
        self.hostaddr = hostaddr
        self.username = username
        self.password = password
        self.port = port
        self.ftp = FTP()
        self.file_list = []   # filled by get_file_list(): [type-char, name] pairs

    def __del__(self):
        self.ftp.close()

    def login(self):
        """Connect and log in; return True on success, False otherwise."""
        ftp = self.ftp
        connected = False
        try:
            timeout = 20
            socket.setdefaulttimeout(timeout)
            # Active mode is forced; the server presumably rejects PASV —
            # TODO confirm.
            ftp.set_pasv(False)
            ftp.connect(self.hostaddr, self.port)
            ftp.login(self.username, self.password)
            connected = True
        except Exception:
            common.log_err('Connecting Or Login Failed on %s' % self.hostaddr)
        return connected

    def is_same_size(self, localfile, remotefile):
        # Deliberately always reports "different" so every file is
        # transferred; the size-comparison implementation was disabled
        # by the original author.
        return 0

    def download_file(self, localfile, remotefile):
        """Download one remote file into *localfile* (binary mode)."""
        with open(localfile, 'wb') as file_handler:
            self.ftp.retrbinary(u'RETR %s' % (remotefile), file_handler.write)

    def listFiles(self):
        """Return the entry names of the current remote directory."""
        self.file_list = []
        self.ftp.dir(self.get_file_list)
        return [entry[1] for entry in self.file_list]

    def download_files(self, localdir='./', remotedir='./'):
        """Recursively mirror *remotedir* into *localdir*."""
        try:
            self.ftp.cwd(remotedir)
        except Exception:
            # Remote directory does not exist: nothing to do.
            return
        if not os.path.isdir(localdir):
            os.makedirs(localdir)
        common.log_info('切换至目录 %s' % self.ftp.pwd())
        self.file_list = []
        self.ftp.dir(self.get_file_list)
        for filetype, filename in self.file_list:
            local = os.path.join(localdir, filename)
            if filetype == 'd':
                self.download_files(local, filename)
            elif filetype == '-':
                self.download_file(local, filename)
        self.ftp.cwd('..')
        common.log_info('返回上层目录 %s' % self.ftp.pwd())

    def delete_file(self, remotefile):
        self.ftp.delete(remotefile)

    def upload_file(self, localfile, remotefile):
        """Upload one local file to *remotefile* (binary mode)."""
        if not os.path.isfile(localfile):
            return
        with open(localfile, 'rb') as file_handler:
            self.ftp.storbinary('STOR %s' % remotefile, file_handler)

    def upload_files(self, localdir='./', remotedir='./'):
        """Recursively upload *localdir*; directories are created as needed."""
        if not os.path.isdir(localdir):
            return
        localnames = os.listdir(localdir)
        for item in localnames:
            src = os.path.join(localdir, item)
            if os.path.isdir(src):
                try:
                    self.ftp.mkd(item)
                except Exception:
                    common.log_info('目录已存在 %s' % item)
                self.upload_files(src, item)
            else:
                self.upload_file(src, item)
        self.ftp.cwd('..')

    def get_file_list(self, line):
        """ftp.dir callback: record [type-char, name] for every real entry."""
        file_arr = self.get_filename(line)
        if file_arr[1] not in ['.', '..']:
            self.file_list.append(file_arr)

    def get_filename(self, line):
        """Extract [type-char, filename] from one LIST output line.

        Relies on the timestamp holding the last ':' on the line; the name
        starts after the run of spaces following that field.  NOTE(review):
        a line without ':' (e.g. year-style timestamps) would misbehave —
        confirm the server always emits HH:MM listings.
        """
        pos = line.rfind(':')
        while (line[pos] != ' '):
            pos += 1
        while (line[pos] == ' '):
            pos += 1
        return [line[0], line[pos:]]
def print_line(f): if 'books.test' not in f and f not in existing: to_upload.add(f) def read_block(block): global data data += block ftp = FTP(host) ftp.set_pasv(False) welcome = ftp.getwelcome() ftp.login(c['lc_update_user'], c['lc_update_pass']) ftp.cwd('/emds/books/all') ftp.retrlines('NLST', print_line) if to_upload: print(welcome) else: ftp.close() sys.exit(0) bad = open(c['log_location'] + 'lc_marc_bad_import', 'a') def iter_marc(data): pos = 0 while pos < len(data): length = data[pos:pos + 5]
def ftp_download(self, safeoutfile): ftp = FTP() # connect if self.verbose: self.write('Connecting to %s\n' % self.host) # TODO: add its IP address after the host try: ftp.connect(host=self.host, port=self.port, timeout=30) except Exception as e: log.msg('FTP connect failed: host=%s, port=%s, err=%s' % (self.host, self.port, str(e))) self.write( 'ftpget: can\'t connect to remote host: Connection refused\n') return False # login if self.verbose: self.write('ftpget: cmd (null) (null)\n') if self.username: self.write('ftpget: cmd USER %s\n' % self.username) else: self.write('ftpget: cmd USER anonymous\n') if self.password: self.write('ftpget: cmd PASS %s\n' % self.password) else: self.write('ftpget: cmd PASS busybox@\n') try: ftp.login(user=self.username, passwd=self.password) except Exception as e: log.msg('FTP login failed: user=%s, passwd=%s, err=%s' % (self.username, self.password, str(e))) self.write('ftpget: unexpected server response to USER: %s\n' % str(e)) try: ftp.quit() except socket.timeout: pass return False # download if self.verbose: self.write('ftpget: cmd TYPE I (null)\n') self.write('ftpget: cmd PASV (null)\n') self.write('ftpget: cmd SIZE %s\n' % self.remote_path) self.write('ftpget: cmd RETR %s\n' % self.remote_path) try: ftp.cwd(self.remote_dir) ftp.retrbinary('RETR %s' % self.remote_file, open(safeoutfile, 'wb').write) except Exception as e: log.msg('FTP retrieval failed: %s' % str(e)) self.write('ftpget: unexpected server response to USER: %s\n' % str(e)) try: ftp.quit() except socket.timeout: pass return False # quit if self.verbose: self.write('ftpget: cmd (null) (null)\n') self.write('ftpget: cmd QUIT (null)\n') try: ftp.quit() except socket.timeout: pass return True
### IMPORT ### import ftplib from ftplib import FTP import os import gzip import shutil ### MAIN ### filenames = [] data = [] path = "./1_Metazoa/WGS/" ftp = FTP() ftp = ftplib.FTP('ftp.ensemblgenomes.org') ftp.login() ftp.cwd('/pub/metazoa/release-44/fasta/') def get_dirs_ftp(folder=""): print ('getting folders...') contents = ftp.nlst(folder) folders = [] for item in contents: if "." not in item and 'ancestral' not in item: folders.append(item) return folders folders_list = get_dirs_ftp() n = 0 for folder in folders_list:
def get_most_recent_file_name(ftpsite, ftpdir): ftp = FTP(ftpsite) ftp.login() ftp.cwd(ftpdir) file_name = sorted(ftp.nlst(), key=lambda x: ftp.voidcmd(f"MDTM {x}"))[-1] return file_name
''') # 'files' is the table for all met files available remotely # 'remote' should be all 1 # 'local' are those available locally # 'needed_for_month' is the subset needed to process the month in question for date in site_dates: date = datetime.strftime(date, '%Y-%m-%d %H:%M:%S') # convert to string for DB db_cur.execute('''INSERT OR IGNORE INTO trajectories (traj_date, processed, attempted) VALUES ( ?, ?, ? )''', ( date, 0, 0) ) # place date and label as un-processed # file name will be added when it's processed and saved ftp_con = FTP('arlftp.arlhq.noaa.gov') # connect to NOAA's FTP server ftp_con.login() # login as anonymous and move to correct dir ftp_con.cwd(remote_hrrr_dir) def get_hrrra_file_list(conn): ''' This function takes one FTP connection (to the ARL Server) and returns a list of all the hrrr met files in that connection's cwd ''' remote_out = [] conn.dir(remote_out.append) remote_files = [] for line in remote_out: if 'hrrra' in line: remote_files.append(line.split(' ')[-1]) return remote_files def parse_files_for_dates(met_file_list):
class FTPClient():
    """
    FTP Client to connect with FTP servers
    """

    def __init__(self, host, port, username, password):
        if port is None:
            port = 21  # default FTP control port
        self.client = FTP()
        self.client.connect(host, port)
        self.client.login(username, password)

    def _enter_remote_dirs(self, parts):
        # cd into "/" + "/".join(parts); on failure walk the components one
        # by one, creating any that are missing.  Extracted from the
        # identical logic duplicated in upload_file/upload_files.
        remote_path = ""
        for part in parts:
            remote_path += "/" + part
        try:
            self.client.cwd(remote_path)
        except Exception:
            for part in parts:
                try:
                    self.client.cwd(part)
                except Exception:
                    self.client.mkd(part)
                    self.client.cwd(part)

    def delete_file(self, file_path):
        """Delete a single remote file (path is made absolute)."""
        remote_path = file_path if file_path[0] == '/' else '/' + file_path
        self.client.delete(remote_path)

    def delete_folder(self, folder_id):
        """Delete every entry listed inside the remote folder."""
        path = folder_id if folder_id[0] == '/' else '/' + folder_id
        for entry in self.client.nlst(path):
            self.client.delete(entry)

    def download_folder(self, src_folder, dst_folder):
        """Download every file of *src_folder* into local *dst_folder*."""
        # NOTE(review): the original also computed a leading-slash variant of
        # src_folder but never used it — possibly nlst() was meant to take
        # the absolute path; confirm before changing.
        os.makedirs(dst_folder, exist_ok=True)
        for f in self.client.nlst(src_folder):
            f_path = f.split('/')
            file_name = f_path[len(f_path) - 1]
            try:
                with open(dst_folder + '/' + file_name, 'wb+') as fh:
                    self.client.retrbinary('RETR ' + src_folder + '/' + file_name, fh.write)
            except Exception:
                print(src_folder + '/' + file_name + " not a file.")

    def upload_file(self, src_file, dst_file):
        """Upload *src_file* to *dst_file*, creating remote dirs as needed."""
        split_dst_file = dst_file.split('/')
        self._enter_remote_dirs(split_dst_file[:-1])
        with open(src_file, 'rb') as fh:
            self.client.storbinary('STOR ' + split_dst_file[len(split_dst_file) - 1], fh)
        self.client.cwd("/")

    def upload_files(self, folder_id, selected_chunks, folder_chunks, do_tar=False, do_compress=False):
        """Upload the selected chunk files, optionally bundled into a tar."""
        remote_folder = folder_id if folder_id[0] == '/' else '/' + folder_id
        split_dst_file = remote_folder.split('/')
        self._enter_remote_dirs(split_dst_file[:-1])
        if do_tar:
            if do_compress:
                ext = '.tgz'
                verb = 'w:gz'
            else:
                ext = '.tar'
                verb = 'w'
            folder_id_path = folder_id.split('/')
            folder_id_short = folder_id_path[len(folder_id_path) - 1]
            folder = '/tmp/' + folder_id_short
            # NOTE(review): every copyfile targets the same path, so only the
            # last chunk survives into the archive — looks suspicious; kept
            # as-is, confirm intent.
            for chunk in selected_chunks:
                copyfile(folder_chunks + '/' + chunk, folder)
            folder_compress = '/tmp/' + folder_id_short + ext
            with tarfile.open(folder_compress, verb) as tar:
                tar.add(folder, recursive=True)
            print(folder_compress, '/' + folder_id + '/' + folder_id_short + ext)
            with open(folder_compress, 'rb') as fh:
                self.client.storbinary('STOR ' + folder_id_short + ext, fh)
        else:
            for chunk in selected_chunks:
                with open(folder_chunks + '/' + chunk, 'rb') as fh:
                    self.client.storbinary('STOR ' + chunk, fh)
        self.client.cwd("/")

    def download_file(self, folder_id, selected_chunk, output_folder):
        """Download one chunk file into *output_folder*."""
        if folder_id == '':
            file_path = selected_chunk
        else:
            file_path = folder_id + '/' + selected_chunk
        with open(output_folder + '/' + selected_chunk, 'wb+') as fh:
            self.client.retrbinary('RETR ' + file_path, fh.write)

    def upload_folder(self, dst_folder, src_folder, do_tar=False, do_compress=False):
        """Upload the files of local *src_folder* into remote *dst_folder*."""
        dst_folder = dst_folder.strip('/')
        split_dst_file = dst_folder.split('/')
        # Unlike upload_file/_files, this variant tries the *relative* path
        # first and creates every component (not just the parents).
        try:
            self.client.cwd(dst_folder)
        except Exception:
            for part in split_dst_file:
                try:
                    self.client.cwd(part)
                except Exception:
                    self.client.mkd(part)
                    self.client.cwd(part)
        print('DoTar {}, DoCompress {}'.format(do_tar, do_compress))
        if do_tar:
            if do_compress:
                ext = '.tgz'
                verb = 'w:gz'
            else:
                ext = '.tar'
                verb = 'w'
            folder_compress = '/tmp/result{}'.format(ext)
            print('Compressing to {}'.format(folder_compress))
            with tarfile.open(folder_compress, verb) as tar:
                tar.add(src_folder, arcname=dst_folder, recursive=True)
            with open(folder_compress, 'rb') as fh:
                self.client.storbinary('STOR ' + 'result' + ext, fh)
        else:
            encoded_dir = os.fsencode(src_folder)  # renamed: `dir` shadowed a builtin
            for f in os.listdir(encoded_dir):
                file_path = src_folder + '/' + f.decode('utf-8')
                if not os.path.isdir(file_path):
                    with open(file_path, 'rb') as fh:
                        self.client.storbinary('STOR ' + f.decode('utf-8'), fh)
        self.client.cwd("/")

    def list_files_folder(self, folder):
        """cd into *folder* (made absolute) and list its plain files."""
        remote_folder = folder if folder[0] == '/' else '/' + folder
        self.client.cwd(remote_folder)
        return self.list_files()

    def list_files(self):
        """Return the plain-file names of the current directory via MLSD."""
        return [name for name, facts in self.client.mlsd() if facts['type'] == 'file']

    def get_file_size(self, filename):
        """Return the remote file size in bytes, or -1 on any error."""
        try:
            if filename[0] != '/':
                filename = '/' + filename
            return self.client.size(filename)
        except Exception:
            return -1
def strmFile(self, i): try: name, title, year, imdb, tmdb, tvdb, tvrage, season, episode, tvshowtitle, alter, date = i[ 'name'], i['title'], i['year'], i['imdb'], i['tmdb'], i[ 'tvdb'], i['tvrage'], i['season'], i['episode'], i[ 'tvshowtitle'], i['alter'], i['date'] episodename, episodetitle = urllib.quote_plus( name), urllib.quote_plus(title) systitle, syspremiered = urllib.quote_plus( tvshowtitle), urllib.quote_plus(date) transname = name.translate(None, '\/:*?"<>|').strip('.') transtitle = tvshowtitle.translate(None, '\/:*?"<>|').strip('.') transseason = 'Season %s' % season.translate( None, '\/:*?"<>|').strip('.') content = '%s?action=play&name=%s&title=%s&year=%s&imdb=%s&tmdb=%s&tvdb=%s&tvrage=%s&season=%s&episode=%s&tvshowtitle=%s&alter=%s&date=%s' % ( sys.argv[0], episodename, episodetitle, year, imdb, tmdb, tvdb, tvrage, season, episode, systitle, alter, syspremiered) control.makeFile(self.library_folder) folder = os.path.join(self.library_folder, transtitle) control.makeFile(folder) try: if not 'ftp://' in folder: raise Exception() from ftplib import FTP ftparg = re.compile( 'ftp://(.+?):(.+?)@(.+?):?(\d+)?/(.+/?)').findall(folder) ftp = FTP(ftparg[0][2], ftparg[0][0], ftparg[0][1]) try: ftp.cwd(ftparg[0][4]) except: ftp.mkd(ftparg[0][4]) ftp.quit() except: pass folder = os.path.join(folder, transseason) control.makeFile(folder) try: if not 'ftp://' in folder: raise Exception() from ftplib import FTP ftparg = re.compile( 'ftp://(.+?):(.+?)@(.+?):?(\d+)?/(.+/?)').findall(folder) ftp = FTP(ftparg[0][2], ftparg[0][0], ftparg[0][1]) try: ftp.cwd(ftparg[0][4]) except: ftp.mkd(ftparg[0][4]) ftp.quit() except: pass stream = os.path.join(folder, transname + '.strm') file = control.openFile(stream, 'w') file.write(str(content)) file.close() except: pass
#!/usr/bin/python3 ## Example ./moveItems.sh 192.168.10.35 turnip test fname from ftplib import FTP import sys import pdb filename = str(sys.argv[4]) ftp = FTP(str(sys.argv[1])) ftp.login(user=str(sys.argv[2]), passwd=str(sys.argv[3])) ftp.cwd('items') # change into user dir with open(filename, 'rb') as fp: ftp.storlines('STOR ' + filename, fp) ftp.sendcmd('SITE CHMOD 644 ' + filename)
def handle(self, *args, **options):
    """Export pending bookings as a pipe-delimited latin1 CSV and upload
    it to the insurer's FTP server.

    One row per pending Booking whose created_at falls exactly 100 days
    before today.  NOTE(review): despite the "daily" log messages the
    filter targets today-100d — confirm this offset is intended.
    """
    from django.conf import settings
    from accounts.choices import COUNTRY_CHOICES
    from rent.models import Booking
    log.info('Starting daily insurance subscriptions batch')
    csv_file = TemporaryFile()
    # The insurer expects latin1; characters it cannot encode are dropped.
    latin1csv_file = codecs.EncodedFile(csv_file, 'utf-8', 'latin1', 'ignore')
    writer = csv.writer(latin1csv_file, delimiter='|')
    period = (date.today() - timedelta(days=100))
    for booking in Booking.objects.pending().filter(created_at__year=period.year, created_at__month=period.month, created_at__day=period.day):
        row = SortedDict()
        row['Numéro locataire'] = booking.borrower.pk
        row['Login locataire'] = booking.borrower.username
        row['Adresse email'] = booking.borrower.email
        phones = tuple(booking.borrower.phones.all()[:1])
        phone = phones[0] if phones else None
        row['Téléphone locataire'] = phone
        row['Portable locataire'] = phone
        row['Nom'] = smart_str(booking.borrower.last_name.replace("\n", " ").replace("\r", " "))
        row[u'Prénom'] = smart_str(booking.borrower.first_name.replace("\n", " ").replace("\r", " "))
        for address in booking.borrower.addresses.all()[:1]:
            row['Adresse 1'] = smart_str(address.address1.replace("\n", " ").replace("\r", " "))
            row['Adresse 2'] = smart_str(address.address2.replace("\n", " ").replace("\r", " ")) if address.address2 else None
            row['Code postal'] = address.zipcode.replace("\n", " ").replace("\r", " ")
            row['Ville'] = smart_str(address.city.replace("\n", " ").replace("\r", " "))
            row['Pays'] = COUNTRY_CHOICES[address.country]
            break
        else:
            # BUG FIX: the original chained assignment was missing its
            # terminating None and swallowed the following statement, so all
            # borrower address columns received the owner's pk and the
            # owner-number column was skipped whenever an address existed.
            row['Adresse 1'] = \
                row['Adresse 2'] = \
                row['Code postal'] = \
                row['Ville'] = \
                row['Pays'] = None
        row['Numéro propriétaire'] = smart_str(booking.owner.pk)
        row['Login propriétaire'] = smart_str(booking.owner.username)
        row['Adresse email propriétaire'] = booking.owner.email
        phones = tuple(booking.owner.phones.all()[:1])
        phone = phones[0] if phones else None
        row['Téléphone propriétaire'] = phone
        row['Portable propriétaire'] = phone
        row['Nom propriétaire'] = smart_str(booking.owner.last_name.replace("\n", " ").replace("\r", " "))
        row[u'Prénom propriétaire'] = smart_str(booking.owner.first_name.replace("\n", " ").replace("\r", " "))
        for address in booking.owner.addresses.all()[:1]:
            row['Adresse 1 propriétaire'] = smart_str(address.address1.replace("\n", " ").replace("\r", " "))
            # Made consistent with the borrower column: the original applied
            # smart_str() to the whole conditional, turning a missing address
            # line into the literal string "None".
            row['Adresse 2 propriétaire'] = smart_str(address.address2.replace("\n", " ").replace("\r", " ")) if address.address2 else None
            row['Code postal propriétaire'] = address.zipcode.replace("\n", " ").replace("\r", " ")
            row['Ville propriétaire'] = smart_str(address.city.replace("\n", " ").replace("\r", " "))
            row['Pays propriétaire'] = COUNTRY_CHOICES[address.country]
            break
        else:
            row['Adresse 1 propriétaire'] = \
                row['Adresse 2 propriétaire'] = \
                row['Code postal propriétaire'] = \
                row['Ville propriétaire'] = \
                row['Pays propriétaire'] = None
        row['Numéro police'] = settings.POLICY_NUMBER
        row['Numéro partenaire'] = settings.PARTNER_NUMBER
        row['Numéro contrat'] = 500000 + booking.contract_id
        row['Date d\'effet de la location'] = booking.started_at.strftime("%Y%m%d")
        row[u'Numéro de commande'] = booking.uuid
        try:
            product = booking.product
            row['Type de produit'] = smart_str(product._get_category().name)
            row[u'Désignation'] = smart_str(product.description.replace("\n", " ").replace("\r", " "))
            row['Informations complémentaires produit'] = smart_str(product.summary.replace("\n", " ").replace("\r", " "))
        except ObjectDoesNotExist:
            row['Type de produit'] = \
                row[u'Désignation'] = \
                row['Informations complémentaires produit'] = None
        row['Prix de la location TTC'] = comma_separated(booking.total_amount)
        row['Montant de la Caution'] = comma_separated(booking.deposit_amount)
        row[u'Durée de garantie'] = (booking.ended_at - booking.started_at).days
        try:
            row[u'Prix de cession de l\'assurance HT'] = comma_separated(round(booking.insurance_fee, 2))
            row['Com. du partenaire'] = comma_separated(round(booking.insurance_commission, 2))
            row['Taxes assurance à 9%'] = comma_separated(round(booking.insurance_taxes, 2))
        except ObjectDoesNotExist:
            row[u'Prix de cession de l\'assurance HT'] = \
                row['Com. du partenaire'] = \
                row['Taxes assurance à 9%'] = None
        row['Cotisation TTC'] = comma_separated(round(booking.insurance_amount, 2))
        writer.writerow(row.values())
    latin1csv_file.seek(0)
    log.info('Uploading daily insurance subscriptions')
    ftp = FTP(settings.INSURANCE_FTP_HOST)
    ftp.login(settings.INSURANCE_FTP_USER, settings.INSURANCE_FTP_PASSWORD)
    # set FTP PASSIVE mode; disabled by default
    ftp.set_pasv(getattr(settings, 'INSURANCE_FTP_PASSIVE_MODE', 0))
    if settings.INSURANCE_FTP_CWD:
        ftp.cwd(settings.INSURANCE_FTP_CWD)
    ftp.storlines("STOR subscriptions-eloue-%s-%s-%s.csv" % (period.year, period.month, period.day), latin1csv_file)
    ftp.quit()
    latin1csv_file.close()  # also closes the underlying TemporaryFile
    log.info('Finished daily insurance subscriptions batch')
from ftplib import FTP import requests import datetime ftp = FTP('address') ftp.login('login', 'pass') ftp.cwd('logs') ftp.retrbinary('RETR event.txt', open('event_edv3.txt', 'wb').write) ftp.quit() url = 'http://url:port/write?db=dbname' eventsList = open("event_edv3.txt", "r") event = list(eventsList) eventsList.close() for n in event[8:]: if 'user' not in n: latestEvent = n.split('\r\n')[0] savedEvent = list(open('eventLog3.txt', 'r'))[0].replace('\t', ' ').split('\r\n')[0] if latestEvent != savedEvent: handle = open('eventLog3.txt', 'w') handle.write(latestEvent) handle.close() requests.post( url, auth=('login', 'pass'), data='ups_edv3,edv=3 latestEvent="{0}"'.format(latestEvent)) else: handle = open('eventLog3.txt', 'w') handle.write(str(datetime.datetime.now) + "no news is good news")