Esempio n. 1
0
 def reconnect(self):
     """Open a fresh FTP session, replacing any previously cached one.

     Uses the host and credentials stored on the instance and a
     5 second connection timeout.
     """
     self._connection = ftplib.FTP(
         self.host, self.user, self.password, timeout=5)
Esempio n. 2
0
    # NOTE(review): fragment of a larger download routine (Python 2 print
    # syntax); ``options`` is supplied by the unseen caller, and the
    # snippet is truncated before the matching except/close.
    timeout = 1800
    # Applies to every socket created from here on, including the FTP one.
    socket.setdefaulttimeout(timeout)

    # FTP: ftp://ftp.ncbi.nlm.nih.gov/genomes/Viruses/Enterobacteria_phage_phiX174_sensu_lato_uid14015/NC_001422.fna
    # web: http://www.ncbi.nlm.nih.gov/nuccore/9626372

    url = 'genomes/Viruses/Enterobacteria_phage_phiX174_sensu_lato_uid14015/'
    version = ""
    #mypath = os.path.join(options.output_directory,'phix174_index')
    mypath = options.output_directory
    if not os.path.isdir(mypath):
        os.makedirs(mypath)

    print "Downloading the 'Enterobacteria phage phiX174' genome from NCBI server!"
    try:
        ftp = ftplib.FTP(options.server)
        print ftp.login()
        ftp.cwd(url)

        list_files = ftp.nlst()

        # Keep only the phiX174 genome FASTA file (case-insensitive match).
        list_files = [el for el in list_files if el.lower() == 'nc_001422.fna']

        if len(list_files) == 1:
            new_files = []
            for filename in list_files:
                print "Downloading: %s/%s/%s" % (options.server,url,filename)
                nf = os.path.join(mypath,filename)
                new_files.append(nf)
                # NOTE(review): the handle is never closed in the visible
                # code -- the snippet ends before any cleanup.
                fid = open(nf,'wb')
                ftp.retrbinary("RETR " + filename, fid.write)
Esempio n. 3
0
class myFtp:
    """Convenience wrapper around a shared :class:`ftplib.FTP` connection
    with recursive upload/download helpers.

    Note: ``ftp``, ``bIsDir`` and ``path`` are *class* attributes, so all
    instances share one connection and one scratch state.
    """
    ftp = ftplib.FTP()
    bIsDir = False  # scratch flag written by the show() callback
    path = ""       # scratch name being tested by isDir()

    def connetc(self, host='', port=21, timeout=-999):
        """Connect to host:port ([sic] name kept for existing callers).

        -999 is ftplib's sentinel meaning "use the global default timeout".
        """
        # BUGFIX: the original passed ``self`` as the host argument
        # (self.ftp.connect(self, host, port, timeout)).
        self.ftp.connect(host, port, timeout)

    def getwelcome(self):
        """Return the server's welcome banner."""
        # BUGFIX: the original dropped the return value.
        return self.ftp.getwelcome()

    def cwd(self, dirname):
        """Change the remote working directory; returns the server reply."""
        res = self.ftp.cwd(dirname)
        return res

    def dir(self, *args):
        """Print a remote directory listing (delegates to ftplib)."""
        self.ftp.dir(*args)

    def __init__(self, host, port='21'):
        # self.ftp.set_debuglevel(2)  # debug level 2: verbose protocol trace
        # self.ftp.set_pasv(0)        # 0 = active mode, 1 = passive mode
        self.ftp.connect(host, port)

    def Login(self, user, passwd):
        """Authenticate and log the server's welcome message."""
        self.ftp.login(user, passwd)
        logging.info(self.ftp.welcome)

    def DownLoadFile(self, LocalFile, RemoteFile):
        """Download RemoteFile into LocalFile; always returns True."""
        # BUGFIX: context manager closes the handle even when the
        # transfer fails partway through (the original leaked it).
        with open(LocalFile, 'wb') as file_handler:
            self.ftp.retrbinary("RETR %s" % (RemoteFile), file_handler.write)
        return True

    def UpLoadFile(self, LocalFile, RemoteFile):
        """Upload LocalFile; returns False when it is not a regular file."""
        if os.path.isfile(LocalFile) is False:
            return False
        # BUGFIX: same leak as DownLoadFile -- use a context manager.
        with open(LocalFile, "rb") as file_handler:
            self.ftp.storbinary('STOR %s' % RemoteFile, file_handler, 4096)
        return True

    def UpLoadFileTree(self, LocalDir, RemoteDir):
        """Recursively upload LocalDir into RemoteDir (must already exist)."""
        if os.path.isdir(LocalDir) is False:
            return False
        logging.info("LocalDir: %s" % LocalDir)
        LocalNames = os.listdir(LocalDir)
        print("list:", LocalNames)

        print(RemoteDir)
        self.ftp.cwd(RemoteDir)
        for Local in LocalNames:
            src = os.path.join(LocalDir, Local)
            if os.path.isdir(src):
                self.UpLoadFileTree(src, Local)
            else:
                self.UpLoadFile(src, Local)
        self.ftp.cwd("..")
        return

    def DownLoadFileTree(self, LocalDir, RemoteDir):
        """Recursively download RemoteDir into LocalDir (created if absent)."""
        print("remoteDir:", RemoteDir)
        if os.path.isdir(LocalDir) is False:
            os.makedirs(LocalDir)
        self.ftp.cwd(RemoteDir)
        RemoteNames = self.ftp.nlst()
        print("RemoteNames", RemoteNames)
        print(self.ftp.nlst("/del1"))  # debug leftover, kept as-is
        for filee in RemoteNames:
            Local = os.path.join(LocalDir, filee)
            if self.isDir(filee):
                self.DownLoadFileTree(Local, filee)
            else:
                self.DownLoadFile(Local, filee)
        self.ftp.cwd("..")
        return

    def show(self, listt):
        """LIST-line callback: set bIsDir when self.path appears as a dir.

        Assumes a DOS/Windows style listing containing "<DIR>" --
        TODO confirm against the actual server's LIST format.
        """
        result = listt.lower().split(" ")
        if self.path in result and "<dir>" in result:
            self.bIsDir = True

    def isDir(self, path):
        """Return True when ``path`` shows up as a directory in LIST output."""
        self.bIsDir = False
        self.path = path
        # this uses a callback function that will change the bIsDir value
        self.ftp.retrlines('LIST', self.show)
        return self.bIsDir

    def retrlines(self, cmd, callback=None):
        """Pass-through to ftplib.FTP.retrlines; returns the server reply."""
        res = self.ftp.retrlines(cmd, callback)
        return res

    def mkd(self, dirname):
        """Create a remote directory; returns its pathname."""
        resStr = self.ftp.mkd(dirname)
        return resStr

    def rmd(self, dirname):
        """Remove a remote directory; returns the server reply."""
        resStr = self.ftp.rmd(dirname)
        return resStr

    def delete(self, filename):
        """Delete a remote file; returns the server reply."""
        resStr = self.ftp.delete(filename)
        return resStr

    def pwd(self):
        """Return the remote working directory."""
        resStr = self.ftp.pwd()
        return resStr

    def nlst(self):
        """Return the name listing of the remote working directory."""
        resStr = self.ftp.nlst()
        return resStr

    def close(self):
        """Politely terminate the FTP session."""
        self.ftp.quit()
Esempio n. 4
0
def _get_IONEX_file(
        time="2012/03/23/02:20:10.01",
        server="ftp://cddis.gsfc.nasa.gov/gnss/products/ionex/",
        prefix="codg",
        outpath='./',
        overwrite=False,
        backupserver="ftp://cddis.gsfc.nasa.gov/gnss/products/ionex/"):
    """Get IONEX file with prefix from server for a given day

    Downloads files with given prefix from the ftp server, unzips and stores
    the data. For prefix ROBR the data is stored on the server in a separate
    IONEX file every 15 minutes, these are automatically combined for
    compatibility.

    Args:
        time (string or list) : date of the observation, either a
            "YYYY/MM/DD/..." string or a [year, month, day] sequence
        server (string) : ftp server + path to the ionex directories
        prefix (string) : prefix of the IONEX files (case insensitive)
        outpath (string) : path where the data is stored
        overwrite (bool) : Do (not) overwrite existing data
        backupserver (string) : fallback server used when `server` fails

    Returns:
        The path of the stored file (str) on success, -1 when no files
        were found on the server.
    """
    prefix = prefix.upper()
    if outpath[-1] != "/":
        outpath += "/"
    if not os.path.isdir(outpath):
        try:
            os.makedirs(outpath)
        except OSError:
            # Narrowed from a bare ``except``: only creation failures here.
            logging.error("cannot create output dir for IONEXdata: %s",
                          outpath)

    try:
        # BUGFIX: yy must be an int -- it is formatted with "%02d" below,
        # which raises TypeError for a string slice.
        yy = int(time[2:4])
        year = int(time[:4])
        month = int(time[5:7])
        day = int(time[8:10])
    except (TypeError, ValueError):
        # ``time`` is a [year, month, day] sequence instead of a string.
        year = time[0]
        yy = year - 2000
        month = time[1]
        day = time[2]
    mydate = datetime.date(year, month, day)
    dayofyear = mydate.timetuple().tm_yday
    #if file exists just return filename
    if not overwrite and os.path.isfile("%s%s%03d0.%02dI" %
                                        (outpath, prefix, dayofyear, yy)):
        logging.info("FILE exists: %s%s%03d0.%02dI", outpath, prefix,
                     dayofyear, yy)
        return "%s%s%03d0.%02dI" % (outpath, prefix, dayofyear, yy)
    #check if IGRG (fast files) exist, use those instead (UGLY!!)
    if not overwrite and os.path.isfile("%sIGRG%03d0.%02dI" %
                                        (outpath, dayofyear, yy)):
        logging.info("fast FILE exists: %sIGRG%03d0.%02dI", outpath, dayofyear,
                     yy)
        return "%sIGRG%03d0.%02dI" % (outpath, dayofyear, yy)

    tried_backup = False
    serverfound = False
    # Try the primary server up to 10 times, then fall back to the backup.
    while not serverfound:
        ftpserver = server.replace("ftp:", "").strip("/").split("/")[0]
        ftppath = "/".join(
            server.replace("ftp:", "").strip("/").split("/")[1:])
        nr_tries = 0
        try_again = True
        while try_again and nr_tries < 10:
            try:
                ftp = ftplib.FTP(ftpserver)
                ftp.login()
                try_again = False
                serverfound = True
            except ftplib.error_perm:
                if "213.184.6.172" in server:
                    # SECURITY NOTE(review): hard-coded credentials for this
                    # mirror; consider moving them to configuration.
                    ftp.login("data-out", "Qz8803#mhR4z")
                    try_again = False
                    serverfound = True
                else:
                    try_again = True
                    nr_tries += 1
                    if nr_tries >= 10:
                        if tried_backup or server == backupserver:
                            raise Exception("Could not connect to %s" %
                                            ftpserver)
                        else:
                            server = backupserver
                            tried_backup = True
            except socket.gaierror:
                try_again = True
                nr_tries += 1
                if nr_tries >= 10:
                    if tried_backup or server == backupserver:
                        raise Exception("Could not connect to %s" % ftpserver)
                    else:
                        server = backupserver
                        tried_backup = True
    ftp.cwd(ftppath)
    totpath = ftppath
    myl = []
    ftp.retrlines("NLST", myl.append)
    logging.info("Retrieving data for %d or %02d%03d", year, yy, dayofyear)
    # Descend into either a <year> or a <yy><doy> directory layout.
    if str(year) in myl:
        ftp.cwd(str(year))
        totpath += "/%d" % (year)
    elif "%02d%03d" % (yy, dayofyear) in myl:
        ftp.cwd("%02d%03d" % (yy, dayofyear))
        totpath += "/%02d%03d" % (yy, dayofyear)
    myl = []
    ftp.retrlines("NLST", myl.append)
    if "%03d" % dayofyear in myl:
        ftp.cwd("%03d" % dayofyear)
        totpath += "/%03d" % (dayofyear)
    logging.info("Retrieving data from %s", totpath)
    myl = []
    ftp.retrlines("NLST", myl.append)
    filenames = [
        i for i in myl
        if (prefix.lower() in i.lower()) and ("%03d" % dayofyear in i.lower())
        and (i.lower().endswith("i.z") or i.lower().endswith("i"))
    ]
    logging.info(" ".join(filenames))
    #assert len(filenames) > 0, "No files found on %s for %s" % (server,prefix)
    if len(filenames) <= 0:
        logging.info("No files found on %s for %s", server, prefix)
        return -1

    if prefix.lower() == "robr" and len(filenames) > 1:
        filenames = sorted(filenames)
        filenames = _store_files(ftp, filenames, outpath, overwrite)
        # get data for next day
        nextday = mydate + datetime.timedelta(days=1)
        nyear = nextday.year
        ndayofyear = nextday.timetuple().tm_yday
        ftp.cwd("/" + ftppath)
        myl = []
        ftp.retrlines("NLST", myl.append)
        if str(nyear) in myl:
            ftp.cwd(str(nyear))
        # BUGFIX: retrlines() returns the status string, not the listing;
        # collect the listing through the callback as done above.
        myl = []
        ftp.retrlines("NLST", myl.append)
        if str(ndayofyear) in myl:
            ftp.cwd(str(ndayofyear))
        myl = []
        ftp.retrlines("NLST", myl.append)
        nfilenames = [
            i for i in myl if (prefix.lower() in i.lower()) and (
                i.lower().endswith("i.z")) and "A00" in i.upper()
        ]
        nfilenames = _store_files(ftp, nfilenames, outpath, overwrite)
        filenames += nfilenames
        # BUGFIX: format the two-digit year with %02d (yy is an int now);
        # "%s" would drop the leading zero for years before 2010.
        _combine_ionex(outpath, filenames,
                       prefix + "%03d0.%02dI" % (dayofyear, yy))
        ftp.quit()
        return os.path.join(outpath, prefix + "%03d0.%02dI" % (dayofyear, yy))
    else:
        nfilenames = _store_files(ftp, filenames, outpath, overwrite)
        ftp.quit()
        return nfilenames[0]
Esempio n. 5
0
    def upload_ftp(self, url=None, directory=None, user=None, password=None):
        """Attempts to upload the archive to either the policy defined or user
        provided FTP location.

        :param url: The URL to upload to
        :type url: ``str``

        :param directory: The directory on the FTP server to write to
        :type directory: ``str`` or ``None``

        :param user: The user to authenticate with
        :type user: ``str``

        :param password: The password to use for `user`
        :type password: ``str``

        :returns: ``True`` if upload is successful
        :rtype: ``bool``

        :raises: ``Exception`` if upload in unsuccessful
        """
        try:
            import ftplib
            import socket
        except ImportError:
            # socket is part of the standard library, should only fail here on
            # ftplib
            raise Exception("missing python ftplib library")

        # Fall back to policy-provided values for anything not given.
        if not url:
            url = self.get_upload_url()
        if url is None:
            raise Exception("no FTP server specified by policy, use --upload-"
                            "url to specify a location")

        url = url.replace('ftp://', '')

        if not user:
            user = self.get_upload_user()

        if not password:
            password = self.get_upload_password()

        if not directory:
            directory = self.upload_directory or self._upload_directory

        try:
            session = ftplib.FTP(url, user, password, timeout=15)
            if not session:
                raise Exception("connection failed, did you set a user and "
                                "password?")
            session.cwd(directory)
        except socket.timeout:
            raise Exception("timeout hit while connecting to %s" % url)
        except socket.gaierror:
            raise Exception("unable to connect to %s" % url)
        except ftplib.error_perm as err:
            # Map the FTP reply code to a friendlier message.
            errno = str(err).split()[0]
            if errno == '503':
                raise Exception("could not login as '%s'" % user)
            if errno == '530':
                raise Exception("invalid password for user '%s'" % user)
            if errno == '550':
                raise Exception("could not set upload directory to %s" %
                                directory)
            raise Exception("error trying to establish session: %s" % str(err))

        try:
            with open(self.upload_archive_name, 'rb') as _arcfile:
                session.storbinary(
                    "STOR %s" % self.upload_archive_name.split('/')[-1],
                    _arcfile)
            return True
        except IOError:
            raise Exception("could not open archive file")
        finally:
            # BUGFIX: always close the control connection; previously the
            # session leaked when the archive could not be opened.
            try:
                session.quit()
            except ftplib.all_errors:
                pass
Esempio n. 6
0
# Per-day destination directory for the MikroTik configuration backups.
path = f'/home/backup/{day}/mikrotik'

if not os.path.isdir(path):
    os.makedirs(path)

os.chdir(path)

# Local/remote backup file name, keyed by the device name.
filename_pattern = '{}_backup.rsc'

# Pass 1: fetch the exported backup script from every device over FTP.
for device in devices:
    print("Connect to {}:".format(device['ip']))

    filename = filename_pattern.format(device['name'])
    try:
        with ftplib.FTP(device['ip'], device['user_name'],
                        device['users_passwd']) as con:
            with open(filename, "wb") as f:
                con.retrbinary('RETR ' + filename, f.write)
            print("    File transfer: done")
    except Exception as e:
        # Best-effort: skip devices that cannot be reached.
        continue

# Pass 2: open the API connection to each device.
for device in devices:
    print("Connect to {}:".format(device['ip']))

    # Create the socket and the device API object
    try:
        s = libapi.socketOpen(device['ip'])
    except Exception as e:
        continue
    dev_api = libapi.ApiRos(s)
Esempio n. 7
0
import ftplib
import ftputil
import io
import gzip
from xml.dom.minidom import parse, parseString
import shutil
import sys
import socket
import os

# Fetch the PubMed baseline data from the NCBI ftp server.
ftp = ftplib.FTP('ftp.ncbi.nlm.nih.gov')
ftp.login()
ftp.cwd('/pubmed/baseline')
file_mapper = {}

for file in ftp.nlst():
    print(file)
    # Skip the .md5 checksum files.
    if len(file.split('.xml.gz.md5')) > 1: continue
    # Download only the compressed files, writing them under ./compressed_data.
    if len(file.split('.xml.gz')) > 1:
        # NOTE(review): only the bound write method is kept, so the file
        # handle is never closed explicitly here.
        f = open('./compressed_data/' + file, 'wb+').write
        file_mapper[file] = ftp.retrbinary('RETR ' + file, f)

# Alternative: reuse files that were downloaded on a previous run.
# files = os.listdir(path = "./compressed_data")
count = 0
# for key in files:
# Decompress the xml files (loop body truncated in this snippet).
for key in file_mapper:
Esempio n. 8
0
    # Close out the generated HTML index page (function header not visible
    # in this snippet).
    fichier_index.write("      </tbody>\n")
    fichier_index.write("    </table>\n")
    fichier_index.write(
        "    <img id='check' src='../transparent.png' width='75'>\n")
    fichier_index.write(
        "    <img id='check2' src='../transparent.png' width='75'>\n")

    fichier_index.write("  </body>\n")
    fichier_index.write("</html>\n")
    fichier_index.close()

if transfert_ftp == '1':
    print('\n\nFTP start')
    repertoire_imagme_global = '/photos_numeriques/'

    # SECURITY NOTE(review): FTP credentials are hard-coded in source;
    # they should be moved to configuration or environment variables.
    session = ftplib.FTP('www.imagme.com', 'godimagme', 'Kil01m@r')

    # Mirror the local transfer directory onto the remote photo folder.
    for root, dirs, files in os.walk(dir_transfert, topdown=False):
        print('------------------------------------------------------')
        repertoire_raspberry = os.path.basename(root)

        if not repertoire_raspberry == '':
            try:
                session.mkd(repertoire_imagme_global + repertoire_raspberry)
                print('\nCreation repertoire: ' + '[www.imagme.com] ' +
                      repertoire_imagme_global + repertoire_raspberry)

            except ftplib.error_perm:
                # The directory most likely exists already; ignore.
                pass

        for f in files:
Esempio n. 9
0
import ftplib


def getFile(ftp, filename):
    """Download *filename* from the connected *ftp* server into the
    current working directory.

    Failures are reported to stdout instead of being raised, preserving
    the original best-effort behaviour.
    """
    try:
        # BUGFIX: context manager closes the local file even when the
        # transfer fails (the original leaked the handle).
        with open(filename, 'wb') as fobj:
            ftp.retrbinary("RETR " + filename, fobj.write)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; message kept identical.
        print("Error")


# Example: anonymous download of a single file from the public NLUUG server.
ftp = ftplib.FTP("ftp.nluug.nl")
ftp.login("anonymous", "ftplib-example-1")

ftp.cwd('/pub/')  # change directory to /pub/
getFile(ftp, 'README.nluug')

ftp.quit()
def main():
    '''Crawl the public audit FTP site, download each district's PDFs,
    merge multi-file audits with pdftk and rename the result using the
    short names from the Illinois Entities spreadsheet.

    Relies on module-level globals defined elsewhere in the original
    script: ``url``, ``start_from``, ``dir_in``, ``dir_pdfs``, ``year``,
    ``illinois_entities_xlsx_file``, ``illinois_entities_sheet`` and the
    helper ``ftp_dir``.
    '''
    ftp = ftplib.FTP(url.netloc)
    ftp.login()
    print('login to ' + url.netloc)
    logging.info('login to ' + url.netloc)
    stack = [url.path]
    path = stack.pop()
    ftp.cwd(path)

    # add all directories to the queue
    children = ftp_dir(ftp)
    dirs = [
        posixpath.join(path, child[1]) for child in children if not child[0]
    ]
    # set start_from directory
    # NOTE(review): raises IndexError if ``start_from`` is not among the
    # listed directories (the list empties out).
    while True:
        itemdir = dirs[0]
        if itemdir.split('/')[-1] != start_from.strip():
            del dirs[0]
        else:
            break

    # put values from Illinois Entities.xlsx in dictionary
    print('Creating connection with ' + illinois_entities_xlsx_file)
    wbShort = openpyxl.load_workbook(dir_in +
                                     illinois_entities_xlsx_file.strip())
    sheetShort = wbShort.get_sheet_by_name(illinois_entities_sheet.strip())
    dshort = {}
    row = 2
    scrolldown = True

    # Keys are zero-padded to 8 characters to match the FTP directory names.
    while scrolldown:
        key = str(sheetShort['A' + str(row)].value)
        if len(key) == 6:
            key = '00' + key
        elif len(key) == 7:
            key = '0' + key
        dshort[key] = sheetShort['B' + str(row)].value.strip()

        row += 1
        if sheetShort['A' + str(row)].value == None:
            scrolldown = False  # when finding empty row parsing of Shortnames xlsx will stop

    for udir in dirs:
        print('-' * 20)
        logging.info('-' * 20)
        print(udir)
        logging.info(udir)
        # example of path structure /LocGovAudits/FY2015/00100000
        parseddir = udir.split('/')[-1].strip()
        try:
            preparename = 'IL ' + dshort[parseddir] + ' ' + year + '.pdf'
        except:
            # NOTE(review): bare except -- intended for a missing dshort
            # key; falls back to the raw directory name.
            preparename = parseddir + '.pdf'
        # Strip characters that are invalid in file names.
        preparename = preparename.replace('/', '')
        preparename = preparename.replace(':', '')

        ftp.cwd(udir)
        time.sleep(0.8)
        files = []

        try:
            files = ftp.nlst()
            files.sort()
        except Exception as e:
            if str(e) == "550 No files found":
                print("No files in this directory")
                logging.info(udir + " No files in this directory")
            else:
                print(str(e))
                logging.info(udir + ' ' + str(e))

        for f in files:
            with open(dir_pdfs + f, 'wb') as fobj:
                ftp.retrbinary('RETR %s' % f, fobj.write)
                print('downloading ' + f)
                logging.info('downloading ' + f)

        # if more then one pdf in ftp directory merge them
        if len(files) > 1:
            pdfline = ' '.join(files)
            if platform.system() == "Linux":
                command = 'pdftk ' + pdfline + ' cat output temp.pdf'
            if platform.system() == "Windows":
                command = 'pdftk.exe ' + pdfline + ' cat output temp.pdf'
            try:
                os.system(command)
                os.rename('temp.pdf', preparename)
                print(preparename + ' generated')
                logging.info(preparename + ' generated')
                bOK = True
            except Exception as e:
                print(udir + ' ' + pdfline + ' not generated pdf')
                print(str(e))
                logging.info(udir + ' ' + pdfline + ' not generated pdf')
                logging.info(str(e))
                bOK = False
        else:
            # check is there only one pdf file
            if len(files) == 1:
                try:
                    os.rename(dir_pdfs + files[0].strip(),
                              dir_pdfs + preparename)
                except Exception as e:
                    logging.info(str(e))
                    print(str(e))
                print(preparename + ' generated')
                logging.info(preparename + ' generated')
            else:
                print('no files in ' + udir)
                logging.info('no files in ' +
                             udir)  #this most probably will never occure

        # delete original pdf files if more then one, since if one only, with renaming it is deleted
        if len(files) > 1 and bOK:
            for f in files:
                os.remove(dir_pdfs + str(f).strip())
Esempio n. 11
0
 def _connect(self):
     """Log in to ftp and cache the live session on the instance."""
     session = ftplib.FTP(self.host, self.username, self.password)
     self.ftpcon = session
Esempio n. 12
0
#!/usr/bin/env python

import ftplib

# Python 2 example: download a single file with ftplib protocol tracing on.
ftp = ftplib.FTP()
ftp.set_debuglevel(2)
ftp.connect('192.168.124.128')
ftp.login()
print ftp.getwelcome()
ftp.cwd('nide')
ftp.dir()

bufsize = 1024
filename = 'nide'
# NOTE(review): only the bound write method is kept, so the file handle
# is never closed explicitly.
file_handler = open(filename, 'wb').write
ftp.retrbinary('RETR nide', file_handler, bufsize)
ftp.set_debuglevel(0)

ftp.quit()
Esempio n. 13
0
#!/usr/bin/python

import sys
import ftplib
import os
import time

# Connection settings (Python 2 script; credentials redacted upstream).
server = "172.16.40.40"
user = "******"
password = "******"
source = "/Learning/Books/A+"
destination = "/home/technician/Downloads"
interval = 0.05

ftp = ftplib.FTP(server)
ftp.login(user, password)


def downloadFiles(path, destination):
    # Mirror remote ``path`` under ``destination``.
    # ``mkdir_p`` is defined elsewhere in the original script; the
    # function body is truncated after the nlst() call in this snippet.
    try:
        ftp.cwd(path)
        os.chdir(destination)
        mkdir_p(destination[0:len(destination) - 1] + path)
        print "Created: " + destination[0:len(destination) - 1] + path
    except OSError:
        pass
    except ftplib.error_perm:
        print "Error: could not change to " + path
        sys.exit("Ending Application")

    filelist = ftp.nlst()
def run_program():
	"""Watch the local directory for files matching ``extension`` and push
	each one to the remote ftp server, writing a ``.OK`` marker next to it
	and recording successful transfers in a local sqlite database.

	Python 2 code (tab indentation and ``except Exception, e`` syntax kept
	as in the original); relies on module globals ``running``,
	``current_dir``, ``extension``, ``host``, ``user``, ``password`` and
	``logger``. The snippet is truncated inside the last try block.
	"""
	while running == True:
		montemps=time.time()
		# Get the list of files in the folder, sorted by modification time
		tab_txt = sorted(glob.glob(current_dir + str(extension)), key=os.path.getmtime)

		nbficht = True

		for elem in tab_txt:
			tps1 = time.clock()
			con = sqlite3.connect('ftdb.sql')
			cur = con.cursor()
			if nbficht == True:
				logger.info(".........Fichiers trouvés: " + str(len(tab_txt)) + ".........")
				logger.info('-----------------------------------------------------------')
				nbficht = False

			if os.path.isfile(elem) == True:
				# Connect to the ftp server
				try:
					ftp = ftplib.FTP(host)
					ftp.login(user, password)
					print "Connexion au serveur ftp réussie"
					logger.info("Connexion au serveur ftp réussie")
					logger.info('-----------------------------------------------------------')

				except Exception, e:
					print 'Erreur connexion ftp :' + str(e)
					logger.error('Erreur connexion ftp :' + str(e))
					logger.warning('-----------------------------------------------------------')
					time.sleep(600)
				# Send the file 'elem' picked up in the loop
				try:
					fic_open = open(str(elem), 'rb+')
					ftp.storbinary('STOR ' + str(os.path.basename(elem)), fic_open)
					fic_open.close()
					success = True

					print "Transfert de " + str(os.path.basename(elem)) + " effectué"
					logger.info("Transfert de " + str(os.path.basename(elem)) + " effectué")
					logger.info('-----------------------------------------------------------')

				except Exception, e:
					print 'Erreur lors du transfert du fichier: ' + str(e)
					logger.error('Erreur lors du transfert du fichier: ' + str(e))
					logger.warning('-----------------------------------------------------------')
					success = False

				# Check the file is actually present on the remote server, only if the transfer went well
				try:
					if success == True:
						ftp_dist = ftp.nlst() # lists the current ftp directory
						if os.path.basename(elem) in ftp_dist:
							ftp_success = True
							elemtabmin = os.path.basename(elem).split('.')
							elemOKmin = elemtabmin[0] + '.OK'
							print elemOKmin

							elemtabs = elem.split('.')
							OKdir = str(elemtabs[0]) + '.OK'
							print OKdir

							elemOK = open(OKdir, "w")
							elemOK.close()
							time.sleep(0.1)

							ftp.storbinary('STOR ' + elemOKmin, open(OKdir))

							print "Le fichier " + str(os.path.basename(elem)) + " a bien été transféré sur le ftp distant."
							logger.info("Le fichier " + str(os.path.basename(elem)) + " a bien été transféré sur le ftp distant.")
							logger.info('-----------------------------------------------------------')
						else:
							ftp_success = False
					else:
						print "Impossible de tester si le fichier existe sur le serveur ftp"
						logger.error("Impossible de tester si le fichier existe sur le serveur ftp")
						logger.warning('-----------------------------------------------------------')
				except Exception, e:
					print "Impossible de récupérer la liste des fichiers distant: " + str(e)
					logger.error("Impossible de récupérer la liste des fichiers distant: " + str(e))
					logger.warning('-----------------------------------------------------------')
				# Check the file really was sent; if so, either back it up or delete it from the local directory
				try:
					if ftp_success == True:
						maintenant = datetime.now()
						a = maintenant.date()
						b = maintenant.hour
						cur.execute("SELECT nbfichier FROM donnees_transmises WHERE date = '" + str(a) + "' AND num_heure = " + str(b) )
						all1 = cur.fetchall()

						if all1 != []:
							print all1[0][0]

							all2 = int(all1[0][0]) + 1
							print all2
							print "liste pas vide donc update"
							cur.execute("UPDATE donnees_transmises SET nbfichier =" + str(all2) + " WHERE date ='" + str(a) + "' AND num_heure =" + str(b))
							con.commit()
						else:
							print "liste vide"
							cur.execute("""INSERT INTO donnees_transmises (date, num_heure, nbfichier) VALUES(?,?,?)""", (a, b, "1"))
							con.commit()
						cur.close()
						con.close()
				except Exception, e:
					print "Erreur lors du remplissage de la base de données sqlite3: " + str(e)
Esempio n. 15
0
def upload(filename):
    """Upload ``filename`` to the FTP server at 192.168.2.29 via STOR.

    The local file is read in binary mode and both the file handle and
    the FTP session are always released, even when the transfer fails.
    """
    session = ftplib.FTP('192.168.2.29')
    try:
        # BUGFIX: the original leaked both the file handle and the FTP
        # session when storbinary raised.
        with open(filename, "rb") as payload:
            session.storbinary('STOR ' + filename, payload)
    finally:
        session.quit()
Esempio n. 16
0
def attack(username, password, tgtHost, redirect):
    """Log in to the target FTP host and inject the *redirect* payload
    into every default page found on it."""
    session = ftplib.FTP(tgtHost)
    session.login(username, password)
    for page in returnDefault(session):
        injectPage(session, page, redirect)
Esempio n. 17
0
    # NOTE(review): this snippet starts inside an unseen function -- the
    # matching ``if`` and the definitions of ``data`` and ``string`` are
    # not visible here.
    else:

        # 10-bit method
        for line in data:

            # append 0 to string for any '-', else 1
            for letter in line:
                if (letter == ('-')):
                    string += "0"
                else:
                    string += "1"

    # output the 7-bit ASCII version of the binary string
    convertASCII(string, len(string))


#####################################MAIN#######################################

# setup connection to server (SERVER is defined elsewhere in the script)
server = ftplib.FTP()
server.connect(SERVER)
server.login('anonymous')

# retrieve the permissions data from server
retrieveData()

# generate the binary string and print the 7-bit ASCII version
genString()

################################################################################
Esempio n. 18
0
    sys.exit(1)
print('success')

# get check-out file list from HOST:VIEW
print('\ncheck-out files in ' + view + ':')
co = doshellcmd(tn, 'lsco')[1:]
if not co:
    print('no checkout files')
    telnetlogout(tn)
    sys.exit(1)
for file in co:
    print(file)

# upload local files corresponding to check-outs to HOME@HOST
sys.stdout.write('\nftp ' + host + '...')
ftp = ftplib.FTP(host, user, password)
if not ftp:
    print('fail')
    telnetlogout(tn)
    sys.exit(1)
print('success, upload files to ' + host + '/home/' + user)
for file in co:
    fname = os.path.normpath(rootdir + file)
    if not os.path.isfile(fname):
        print(fname + ' not found')
        continue
    print(fname)
    cmd = 'STOR ' + ':'.join(
        file.split('/'))  # rename file to avoid name conflict
    ftp.storbinary(cmd, open(fname))
ftp.quit()
Esempio n. 19
0
def connect():
    """Open a session to the configured FTP server and log in.

    Uses the module-level HOST/PORT and USER/PASSWORD settings and returns
    a ready-to-use ftplib.FTP instance.
    """
    client = ftplib.FTP()
    client.connect(HOST, PORT)
    client.login(USER, PASSWORD)
    return client
Esempio n. 20
0
    def get_url(self,
                url,
                dest,
                makedirs=False,
                saltenv='base',
                no_cache=False,
                cachedir=None):
        '''
        Get a single file from a URL.

        Dispatches on the URL scheme: local paths (``file://`` or no
        scheme), ``salt://``, ``s3://``, ``ftp://``, ``swift://`` and
        ``http(s)://`` are each handled by a branch below.

        url -- source URL.
        dest -- destination path; if it is a directory, the remote file
            name is appended.  If ``None``, the contents are returned
            directly for local/salt sources, or downloaded into the minion
            cache for remote schemes (unless ``no_cache``).
        makedirs -- create the destination directory if it is missing.
        saltenv -- fileserver environment used for ``salt://`` URLs.
        no_cache -- for remote schemes, return the body instead of writing
            a cache file.
        cachedir -- override the cache directory used when ``dest`` is None.
        '''
        url_data = urlparse(url)
        url_scheme = url_data.scheme
        url_path = os.path.join(url_data.netloc, url_data.path).rstrip(os.sep)

        # If dest is a directory, rewrite dest with filename
        if dest is not None \
                and (os.path.isdir(dest) or dest.endswith(('/', '\\'))):
            if url_data.query or len(
                    url_data.path) > 1 and not url_data.path.endswith('/'):
                strpath = url.split('/')[-1]
            else:
                # Directory-style URL: fall back to a default file name.
                strpath = 'index.html'

            if salt.utils.is_windows():
                strpath = salt.utils.sanitize_win_path_string(strpath)

            dest = os.path.join(dest, strpath)

        # A single-letter "scheme" is presumably a Windows drive letter
        # (e.g. c:\path), so treat the whole URL as a local file path.
        if url_scheme and url_scheme.lower() in string.ascii_lowercase:
            url_path = ':'.join((url_scheme, url_path))
            url_scheme = 'file'

        if url_scheme in ('file', ''):
            # Local filesystem
            if not os.path.isabs(url_path):
                raise CommandExecutionError(
                    'Path \'{0}\' is not absolute'.format(url_path))
            if dest is None:
                with salt.utils.fopen(url_path, 'r') as fp_:
                    data = fp_.read()
                return data
            return url_path

        if url_scheme == 'salt':
            # Delegate salt:// URLs to the fileserver client.
            result = self.get_file(url,
                                   dest,
                                   makedirs,
                                   saltenv,
                                   cachedir=cachedir)
            if result and dest is None:
                with salt.utils.fopen(result, 'r') as fp_:
                    data = fp_.read()
                return data
            return result

        if dest:
            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                if makedirs:
                    os.makedirs(destdir)
                else:
                    # Destination directory missing and we may not create it.
                    return ''
        elif not no_cache:
            # No explicit destination: download into the minion cache.
            dest = self._extrn_path(url, saltenv, cachedir=cachedir)
            destdir = os.path.dirname(dest)
            if not os.path.isdir(destdir):
                os.makedirs(destdir)

        if url_data.scheme == 's3':
            try:

                def s3_opt(key, default=None):
                    '''Get value of s3.<key> from Minion config or from Pillar'''
                    if 's3.' + key in self.opts:
                        return self.opts['s3.' + key]
                    try:
                        return self.opts['pillar']['s3'][key]
                    except (KeyError, TypeError):
                        return default

                self.utils['s3.query'](method='GET',
                                       bucket=url_data.netloc,
                                       path=url_data.path[1:],
                                       return_bin=False,
                                       local_file=dest,
                                       action=None,
                                       key=s3_opt('key'),
                                       keyid=s3_opt('keyid'),
                                       service_url=s3_opt('service_url'),
                                       verify_ssl=s3_opt('verify_ssl', True),
                                       location=s3_opt('location'),
                                       path_style=s3_opt('path_style', False),
                                       https_enable=s3_opt(
                                           'https_enable', True))
                return dest
            except Exception as exc:
                raise MinionError(
                    'Could not fetch from {0}. Exception: {1}'.format(
                        url, exc))
        if url_data.scheme == 'ftp':
            try:
                ftp = ftplib.FTP()
                ftp.connect(url_data.hostname, url_data.port)
                ftp.login(url_data.username, url_data.password)
                with salt.utils.fopen(dest, 'wb') as fp_:
                    ftp.retrbinary('RETR {0}'.format(url_data.path), fp_.write)
                ftp.quit()
                return dest
            except Exception as exc:
                raise MinionError(
                    'Could not retrieve {0} from FTP server. Exception: {1}'.
                    format(url, exc))

        if url_data.scheme == 'swift':
            try:

                def swift_opt(key, default):
                    '''Get value of <key> from Minion config or from Pillar'''
                    if key in self.opts:
                        return self.opts[key]
                    try:
                        return self.opts['pillar'][key]
                    except (KeyError, TypeError):
                        return default

                swift_conn = SaltSwift(swift_opt('keystone.user', None),
                                       swift_opt('keystone.tenant', None),
                                       swift_opt('keystone.auth_url', None),
                                       swift_opt('keystone.password', None))

                swift_conn.get_object(url_data.netloc, url_data.path[1:], dest)
                return dest
            except Exception:
                raise MinionError('Could not fetch from {0}'.format(url))

        # Everything else is fetched over HTTP(S).
        get_kwargs = {}
        if url_data.username is not None \
                and url_data.scheme in ('http', 'https'):
            # Strip inline user:pass@ credentials out of the netloc and pass
            # them to the HTTP layer as an auth tuple instead.
            netloc = url_data.netloc
            at_sign_pos = netloc.rfind('@')
            if at_sign_pos != -1:
                netloc = netloc[at_sign_pos + 1:]
            fixed_url = urlunparse(
                (url_data.scheme, netloc, url_data.path, url_data.params,
                 url_data.query, url_data.fragment))
            get_kwargs['auth'] = (url_data.username, url_data.password)
        else:
            fixed_url = url

        destfp = None
        try:
            # Tornado calls streaming_callback on redirect response bodies.
            # But we need streaming to support fetching large files (> RAM
            # avail). Here we are working around this by disabling recording
            # the body for redirections. The issue is fixed in Tornado 4.3.0
            # so on_header callback could be removed when we'll deprecate
            # Tornado<4.3.0. See #27093 and #30431 for details.

            # Use list here to make it writable inside the on_header callback.
            # Simple bool doesn't work here: on_header creates a new local
            # variable instead. This could be avoided in Py3 with 'nonlocal'
            # statement. There is no Py2 alternative for this.
            #
            # write_body[0] is used by the on_chunk callback to tell it whether
            #   or not we need to write the body of the request to disk. For
            #   30x redirects we set this to False because we don't want to
            #   write the contents to disk, as we will need to wait until we
            #   get to the redirected URL.
            #
            # write_body[1] will contain a tornado.httputil.HTTPHeaders
            #   instance that we will use to parse each header line. We
            #   initialize this to False, and after we parse the status line we
            #   will replace it with the HTTPHeaders instance. If/when we have
            #   found the encoding used in the request, we set this value to
            #   False to signify that we are done parsing.
            #
            # write_body[2] is where the encoding will be stored
            write_body = [None, False, None]

            def on_header(hdr):
                if write_body[1] is not False and write_body[2] is None:
                    # Try to find out what content type encoding is used if
                    # this is a text file
                    write_body[1].parse_line(hdr)  # pylint: disable=no-member
                    if 'Content-Type' in write_body[1]:
                        content_type = write_body[1].get('Content-Type')  # pylint: disable=no-member
                        if not content_type.startswith('text'):
                            write_body[1] = write_body[2] = False
                        else:
                            encoding = 'utf-8'
                            fields = content_type.split(';')
                            for field in fields:
                                if 'encoding' in field:
                                    encoding = field.split('encoding=')[-1]
                            write_body[2] = encoding
                            # We have found our encoding. Stop processing headers.
                            write_body[1] = False

                        # If write_body[0] is False, this means that this
                        # header is a 30x redirect, so we need to reset
                        # write_body[0] to None so that we parse the HTTP
                        # status code from the redirect target.
                        if write_body[0] is write_body[1] is False:
                            write_body[0] = None

                # Check the status line of the HTTP request
                if write_body[0] is None:
                    try:
                        hdr = parse_response_start_line(hdr)
                    except HTTPInputError:
                        # Not the first line, do nothing
                        return
                    write_body[0] = hdr.code not in [301, 302, 303, 307]
                    write_body[1] = HTTPHeaders()

            if no_cache:
                # Accumulate chunks in memory instead of writing a file.
                result = []

                def on_chunk(chunk):
                    if write_body[0]:
                        if write_body[2]:
                            chunk = chunk.decode(write_body[2])
                        result.append(chunk)
            else:
                # Download into a .part file, renamed into place on success.
                dest_tmp = "{0}.part".format(dest)
                # We need an open filehandle to use in the on_chunk callback,
                # that's why we're not using a with clause here.
                destfp = salt.utils.fopen(dest_tmp, 'wb')  # pylint: disable=resource-leakage

                def on_chunk(chunk):
                    if write_body[0]:
                        destfp.write(chunk)

            query = salt.utils.http.query(fixed_url,
                                          stream=True,
                                          streaming_callback=on_chunk,
                                          header_callback=on_header,
                                          username=url_data.username,
                                          password=url_data.password,
                                          opts=self.opts,
                                          **get_kwargs)
            if 'handle' not in query:
                raise MinionError('Error: {0} reading {1}'.format(
                    query['error'], url))
            if no_cache:
                if write_body[2]:
                    return six.u('').join(result)
                return six.b('').join(result)
            else:
                destfp.close()
                destfp = None
                salt.utils.files.rename(dest_tmp, dest)
                return dest
        except HTTPError as exc:
            raise MinionError('HTTP error {0} reading {1}: {3}'.format(
                exc.code, url,
                *BaseHTTPServer.BaseHTTPRequestHandler.responses[exc.code]))
        except URLError as exc:
            raise MinionError('Error reading {0}: {1}'.format(url, exc.reason))
        finally:
            # Ensure the temp file handle is released on any error path.
            if destfp is not None:
                destfp.close()
def ftp_file_upload(connection_parameter, file_list):
    """Upload a list of local files to an FTP server.

    Args:
        connection_parameter: dict with all the connection parameters::

            {'srv_address': 'xxx.xxx.xxx.xxx',
             'ftp_usr': 'username',
             'ftp_psw': 'password',
             'ftp_path': '/path/to/upload/'}

        file_list: list of local file paths to upload.

    Returns:
        True if all files were uploaded, False if something goes wrong.
    """
    # TODO: Handle upload of different file type based on extension.
    logger = logging.getLogger(__name__)
    # Server address is a requirement.
    if not connection_parameter['srv_address']:
        logger.error("No FTP server address")
        return False
    if not file_list:
        logger.error("No data to transmit")
        return False
    remote = ftplib.FTP()
    try:
        logger.info("Connecting to %s", connection_parameter['srv_address'])
        remote.connect(connection_parameter['srv_address'], 21, 60)
        remote.login(connection_parameter['ftp_usr'],
                     connection_parameter['ftp_psw'])
        # Forcing passive mode
        remote.set_pasv(True)
        logger.info("Moving into remote dir: %s",
                    connection_parameter['ftp_path'])
        remote.cwd(connection_parameter['ftp_path'])
    except ftplib.all_errors as ftperr:
        logger.error("Error during FTP transmission: %s",
                     ftperr,
                     exc_info=True)
        # Bug fix: release the socket even when login/cwd fails.
        remote.close()
        return False

    # NOTE: the original re-checked `if not file_list` here, but that branch
    # was unreachable (an empty list already returned False above).

    # Per-file success flags; any False means overall failure.
    upload_status = list()
    for upload in file_list:
        if upload == '':
            logger.error("Cowardly refusing to transmit an empty string...")
            upload_status.append(False)
            continue
        logger.info("Uploading file: %s", upload)
        try:
            with open(upload, 'rb') as fp:
                # Getting only the filename for
                # ftp server compatibility (filezilla)
                filename = upload.split("/")[-1]
                remote.storbinary('STOR ' + filename, fp)
                upload_status.append(True)
        except IOError as file_err:
            logger.error("Error transferring file %s", upload)
            logger.error("Error during FILE operation: %s",
                         file_err,
                         exc_info=True)
            upload_status.append(False)
        except ftplib.all_errors as ftp_err:
            logger.error("Error transferring file %s", upload)
            logger.error("Error during FILE operation: %s",
                         ftp_err,
                         exc_info=True)
            upload_status.append(False)

    logger.info("Closing connection to %s",
                connection_parameter['srv_address'])
    remote.close()
    # If everything is fine exit with True!
    return False not in upload_status
Esempio n. 22
0
    "ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR507/008/SRR5073518/SRR5073518_1.fastq.gz",
    "ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR507/008/SRR5073518/SRR5073518_2.fastq.gz",
    "ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR507/005/SRR5073705/SRR5073705_1.fastq.gz",
    "ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR507/005/SRR5073705/SRR5073705_2.fastq.gz",
    "ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR507/007/SRR5073987/SRR5073987_1.fastq.gz",
    "ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR507/007/SRR5073987/SRR5073987_2.fastq.gz"
]


def getFile(ftp, filename):
    """Download *filename* from the connected *ftp* session into the CWD.

    Errors are reported (printed) but not raised, matching the original
    best-effort behaviour.
    """
    try:
        # Bug fix: the original leaked the local file handle and used a bare
        # except; close the file deterministically and catch only OS/FTP errors.
        with open(filename, 'wb') as fobj:
            ftp.retrbinary("RETR " + filename, fobj.write)
    except (OSError,) + ftplib.all_errors:
        print("Error")


# Connect to the EBI ENA FTP server and fetch each FASTQ listed above.
ftp = ftplib.FTP("ftp.sra.ebi.ac.uk")

ftp.login()  # anonymous login

for link in links_of_genome_r_files:
    # Derive the remote directory and file name from the ftp:// URL
    # (drop "ftp:", "", and the host -- the first three split components).
    file_location = "/" + "/".join(link.split("/")[3:-1]) + "/"
    file_name = link.split("/")[-1]
    print("\n##############\n")
    print("File Name =>  ", file_name)
    #print("File Location => ", file_location)
    ftp.cwd(file_location)
    getFile(ftp, file_name)
    print("\n##############\n")
ftp.quit()
Esempio n. 23
0
                                    unit='bit')

                def update_download(data):
                    file.write(data)
                    progress_bar.update(len(data))

                ftp.retrbinary('RETR ' + remote_file, update_download)
                print('\n%s: Download finished\n' % time.ctime())
    except ftplib.error_perm:
        return


ip = 'ftp.merrbys.co.uk'
login = '******'
password = '******'
with ftplib.FTP(ip, login, password) as ftp:
    search_list = []
    with open('kmz_search_output.txt') as kmz_search_file:
        for line in kmz_search_file:
            if 'File' in line:
                search_list.append(line)

    for count, line in enumerate(search_list):
        print('\nFile %s of %s\n' % (count, len(search_list)))
        file_string = line.split(' ')[1].strip()
        ddm_remote_file = os.path.join(
            '/Data/L1B/',
            os.path.basename(file_string).replace('.', '/').replace(
                'kmz', 'DDMs.nc'))
        metadata_remote_file = os.path.join(
            '/Data/L1B/',
Esempio n. 24
0
"""
import os
import sys
import ftplib
from getpass import getpass
from mimetypes import guess_type

nonpassive = False  # passive FTP by default
remotesite = 'home.rmi.net'  # download from this site
remotedir = '.'  # and this directory (e.g. public_html)
remoteuser = '******'
remotepass = getpass('Password for %s on %s: ' % (remoteuser, remotesite))
# NOTE(review): the guard tests len(sys.argv) > 3 but reads sys.argv[1] --
# looks like a mismatch; confirm the intended argument layout.
localdir = (len(sys.argv) > 3 and sys.argv[1]) or '.'
cleanall = input('Clean local directory first? ')[:1] in ['y', 'Y']
print('connecting...')
connection = ftplib.FTP(remotesite)  # connect to the FTP site
connection.login(remoteuser, remotepass)  # log in with user name/password
connection.cwd(remotedir)  # move to the directory to be copied
if nonpassive:  # force active FTP mode
    connection.set_pasv(False)  # most servers use passive mode
if cleanall:
    for localname in os.listdir(localdir):  # try to delete all local files
        try:
            print('deleting local', localname)
            os.remove(os.path.join(localdir, localname))
        except:
            print('cannot delete local', localname)

count = 0  # download all remote files
remotefiles = connection.nlst()  # nlst() returns the remote file list
for remotename in remotefiles:
Esempio n. 25
0
# Parse command-line options for the FlyBase GFF download.
ap = argparse.ArgumentParser(usage=usage)
ap.add_argument('--dest', default=".", help="Destination directory; "
        "FlyBase GFF will be downloaded here and database will be created")
# Bug fix: the two help fragments concatenated to "availablechromosomes";
# add the missing space.
ap.add_argument('--list', action="store_true", help="Just list the available "
        "chromosomes to download")
ap.add_argument('--chrom', default='all-no-analysis', help="chromosome"
        " or version to download, one of %s" % CHROMS)
ap.add_argument('--make_db', action='store_true',
        help="Create a gffutils database from the downloaded file")
args = ap.parse_args()


URL = "ftp.flybase.net"
PATH = "genomes/dmel/current/gff/"

ftp = ftplib.FTP(URL)
sys.stderr.write(ftp.login())

# List the remote GFF directory and mark the entry matching --chrom.
dirlist = ftp.nlst(PATH)
sys.stderr.write('\n')
fn = None  # bug fix: fn was unbound (NameError) when nothing matched --chrom
for i in dirlist:
    size = ftp.size(i)
    if ('-' + args.chrom + '-') in i:
        fn = i
        flag = 'X'
    else:
        flag = " "

    sys.stderr.write("[ %s ] [%.1f MB] %s\n" % (flag, (size / 1e6), i))

if fn is None:
    sys.stderr.write("No file matching chromosome %r found\n" % args.chrom)
    sys.exit(1)

size = ftp.size(fn)
Esempio n. 26
0
def on_message(client, userdata, msg):
  """MQTT callback: on a POST-experiments message, fetch the named batch file
  over FTP and schedule one testbed run per line, back-to-back with gaps.

  Expected payload layout (whitespace-separated):
      <MQTT_POST_EXPS> <batch_file> <username>
  NOTE(review): index errors on shorter payloads are unhandled -- confirm
  that malformed messages cannot reach this callback.
  """
  global timestamp_previous_end, scheduler

  # Split the mqtt message into a word list.
  mqtt_msg = msg.payload.decode()
  mqtt_msg_wordlist = mqtt_msg.split()
  print(mqtt_msg_wordlist)

  if mqtt_msg_wordlist[0] == MQTT_POST_EXPS:
    batch_file = mqtt_msg_wordlist[1]
    username = mqtt_msg_wordlist[2]

    # Get the batch file from ftp
    with ftplib.FTP() as ftp:
      ftp.set_pasv(FTP_PASSIVE_MODE)
      ftp.connect(FTP_ADDRESS, FTP_PORT)
      print(ftp.getwelcome())
      try:
        ftp.login(FTP_USER, FTP_PASSWORD)
        ftp.cwd('/home/' + FTP_USER + '/' + username + '/')
        print(batch_file)
        file_size = ftp.size(batch_file)
        if(file_size > 0):
          # Download into the local tmp dir, then read it line-by-line;
          # each line is one experiment task.
          grab_file_from_ftp(batch_file, TESTBED_TMP_PATH + '/batch_file.txt', ftp)
          f = open(TESTBED_TMP_PATH + '/batch_file.txt', "r")
          batch_list = f.readlines()
          f.close()

          # If nothing is currently scheduled, fast-forward the "previous
          # end" marker to now so the first experiment starts promptly.
          if(len(exp_time_list) == 0):
            date_time_now = datetime.datetime.now()
            time_now = date_time_now.time()
            timestamp_now = date_time_now.replace(tzinfo = None).timestamp()
            if(timestamp_now > (timestamp_previous_end + experiment_gap)):
              timestamp_previous_end = timestamp_now
          print(timestamp_previous_end)
          for tasks in batch_list:
            # Task line layout: tasks_wl[1] = name, tasks_wl[2] = duration
            # in minutes (as used below).
            tasks_wl = tasks.split()
            experiment_duration = int(tasks_wl[2])
            print(tasks_wl)

            # Compute time plus the delta of preparation.
            timestamp_actural_start = timestamp_previous_end + preparation_time
            experiment_start_time = datetime.datetime.fromtimestamp(timestamp_actural_start)
            experiment_start_time = experiment_start_time.replace(microsecond = 0)

            # Compute the experiment ID and schedule it.
            exp_id = tid_maker(username)
            print(exp_id)
            scheduler.add_job(testbed_scheduler_function, 'date', run_date = experiment_start_time, args = [[tasks, exp_id]], id = exp_id)
            print(experiment_start_time)

            # Compute end time of the experiment
            experiment_start_time_timestamp = experiment_start_time.replace(tzinfo = None).timestamp()
            experiment_end_time_timestamp = experiment_start_time_timestamp + experiment_duration * 60
            timestamp_previous_end = experiment_end_time_timestamp + experiment_gap
            experiment_end_time = datetime.datetime.fromtimestamp(experiment_end_time_timestamp)
            experiment_end_time = experiment_end_time.replace(microsecond = 0)

            # Generate and new element in the exp_time_list and append it.
            exp_t = exp_time()
            exp_t.name = tasks_wl[1]
            exp_t.experiment_id = exp_id
            exp_t.start = experiment_start_time_timestamp
            exp_t.end = experiment_end_time_timestamp
            exp_time_list.append(exp_t)
            print(exp_t)

          print(scheduler.get_jobs())
      except ftplib.all_errors as e:
        print('FTP error: ', e)
Esempio n. 27
0
    def __genFileIndex(self, version, basedir, release, organism, where):
        """Build the download index for an Ensembl genome release.

        Looks up *organism* in the given Ensembl *release* over FTP and
        returns a dict of Acquire objects keyed by ``gff``, ``genome``,
        ``cds``, ``aa`` and ``ids``.  Entries whose source file could not
        be located on the server are dropped from the result.

        version  -- local genome version label, used in the target path
        basedir  -- base directory on the Ensembl FTP server
        release  -- Ensembl release number (int)
        organism -- organism directory name on the server
        where    -- download location; None uses the configured data dir
        """
        files = {}
        finalPath = '%s/genomes/%s' % (
            (settings.getDataDir() if where is None else where), version)
        conn = ftplib.FTP("ftp.ensembl.org")
        conn.login()
        organisms = [
            line.split('/')[-1]
            for line in conn.nlst("%s/release-%d/gff3" % (basedir, release))
        ]

        if organism not in organisms:
            utils.msg.warning("Organism '%s' not in release %d" %
                              (organism, release))
            return {}
        #fi

        def genGFF3():
            # This is ugly, but the most stable way I was able to find the correct GFF3 file.
            # in GRCH37, it didnt match the obvious pattern...
            uri = [
                u[0] for u in sorted(
                    [(line, len(line.split('.')))
                     for line in conn.nlst("%s/release-%d/gff3/%s" %
                                           (basedir, release, organism))
                     if (len(line.split('.')) > 3)],
                    key=lambda x: x[1])
            ]
            #uri = [ line for line in conn.nlst("%s/release-%d/gff3/%s" % (basedir, release, organism)) if '%d.gff3.gz' % release in line ]
            if len(uri) > 0:
                return utils.Acquire(where=where).curl(
                    "ftp://ftp.ensembl.org/%s" % uri[0]).gunzip().finalize(
                        '%s/genes.gff3' % finalPath)
            #fi
            return None

        #edef

        def genGenome():
            # Merge the per-chromosome DNA files into a single FASTA.
            uri = [
                line for line in conn.nlst("%s/release-%d/fasta/%s/dna" %
                                           (basedir, release, organism))
                if 'dna.chromosome' in line
            ]
            aos = [
                utils.Acquire(where=where).curl("ftp://ftp.ensembl.org/%s" % f)
                for f in uri
            ]
            return utils.Acquire(where=where).merge(
                aos, method='zcat').finalize('%s/dna.fasta' % finalPath)

        #edef

        def genCDS():
            uri = [
                line for line in conn.nlst("%s/release-%d/fasta/%s/cds" %
                                           (basedir, release, organism))
                if 'fa.gz' in line
            ]
            if len(uri) > 0:
                return utils.Acquire(where=where).curl(
                    "ftp://ftp.ensembl.org/%s" % uri[0]).gunzip().finalize(
                        '%s/cds.fa' % finalPath)
            #fi
            return None

        #edef

        def genAA():
            uri = [
                line for line in conn.nlst("%s/release-%d/fasta/%s/pep" %
                                           (basedir, release, organism))
                if 'all.fa.gz' in line
            ]
            if len(uri) > 0:
                return utils.Acquire(where=where).curl(
                    "ftp://ftp.ensembl.org/%s" % uri[0]).gunzip().finalize(
                        '%s/aa.fa' % finalPath)
            #fi
            return None

        #edef

        def genIDS():
            # Extract a gene/transcript/protein/symbol mapping table from the
            # peptide FASTA headers.
            def idMapFunc(inFile, outFile):
                fasta = formats.Fasta(inFile)
                with open(outFile, 'w') as ofd:
                    ofd.write('\t'.join(
                        ['gene', 'transcript', 'protein', 'symbol']) + '\n')
                    for seq in fasta:
                        fullName = fasta[seq].fullName
                        data = dict([(p.split(':')[0],
                                      ':'.join(p.split(':')[1:]))
                                     for p in fullName.split(' ')
                                     if p.split(':')[0] in [
                                         'chromosome', 'gene', 'transcript',
                                         'gene_symbol'
                                     ]])
                        ofd.write('\t'.join([
                            data.get('gene', ''),
                            data.get('transcript', ''), seq,
                            data.get('gene_symbol', '')
                        ]))
                        ofd.write('\n')
                    #efor
                #ewith
                return 0

            #edef
            uri = [
                line for line in conn.nlst("%s/release-%d/fasta/%s/pep" %
                                           (basedir, release, organism))
                if 'all.fa.gz' in line
            ]
            if len(uri) > 0:
                return utils.Acquire(where=where).curl(
                    "ftp://ftp.ensembl.org/%s" %
                    uri[0]).gunzip().func(idMapFunc).finalize('%s/ids.tsv' %
                                                              finalPath)
            #fi
            return None

        #edef

        files["gff"] = genGFF3()
        files["genome"] = genGenome()
        files["cds"] = genCDS()
        files["aa"] = genAA()
        files["ids"] = genIDS()

        # Bug fix: the original looped 'for f in files' and tested the *key*
        # against None (never true) and deleted during iteration; missing
        # entries were never removed. Filter the values instead.
        files = {k: v for k, v in files.items() if v is not None}

        conn.close()
        return files
Esempio n. 28
0
######################################################################
# Networking
# ----------
#

# TODO

######################################################################
# FTP
# ~~~
#

# Full FTP features with ftplib
import ftplib
# Connect anonymously, list the remote directory, then fetch README.md.
ftp = ftplib.FTP("ftp.cea.fr")
ftp.login()
ftp.cwd('/pub/unati/people/educhesnay/pystatml')
ftp.retrlines('LIST')

# Bug fix: the original left 'fd' open if retrbinary raised; 'with'
# guarantees the local file is closed on every path.
with open(os.path.join(tmpdir, "README.md"), "wb") as fd:
    ftp.retrbinary('RETR README.md', fd.write)
ftp.quit()

# File download urllib
import urllib.request
ftp_url = 'ftp://ftp.cea.fr/pub/unati/people/educhesnay/pystatml/README.md'
urllib.request.urlretrieve(ftp_url, os.path.join(tmpdir, "README2.md"))

######################################################################
import ftplib
import os
import socket

HOST = 'ftp.acc.umu.se'
# NOTE(review): 'Pubic' looks like a typo for 'Public' -- confirm the remote path.
DIR = 'Pubic/EFLIB/'
FILE = 'README'

# 1. The client connects to the FTP server on the remote host.
try:
    f = ftplib.FTP()
    # Debug level 2 prints the full FTP conversation, handy for debugging.
    f.set_debuglevel(2)
    # Connect to the host address.
    f.connect(HOST)
except Exception as e:
    print(e)
    exit()

print('Connected to host {0}'.format(HOST))

# 2. The client supplies a user name and password (or 'anonymous' plus an
#    e-mail address).
try:
    # login() without arguments performs an anonymous login.
    f.login()
except Exception as e:
    print(e)
    exit()
print('Logged in as "anonymous"')

# 3. The client and server perform file transfers and information queries.
years = numpy.arange(2007, 2014, 1).tolist()  # REA2 years 2007..2013 inclusive
url = 'ftp://ftp.meteo.uni-bonn.de/pub/reana/COSMO-REA2/TOT_PREC/'

output_folder = '/Volumes/Untitled/COSMO/REA2'

"""
file = r'/Users/janliechti/Google Drive/UNI/FS19/Geographie/Gedatenanalyse_u_Modellierung/Project_COSMO/PyCharm_project/Data/TOTAL_PRECIPITATION.SFC.200701.grb'
file_netcdf = r'/Users/janliechti/Google Drive/UNI/FS19/Geographie/Gedatenanalyse_u_Modellierung/Project_COSMO/PyCharm_project/Data/test.nc'
path_export_folder = r'/Users/janliechti/Google Drive/UNI/FS19/Geographie/Gedatenanalyse_u_Modellierung/Project_COSMO/PyCharm_project'
ds = xr.open_dataset(file, engine='cfgrib')
"""
# COSMO Rea2
for y in years:
    # download files directly via python
    ftp = ftplib.FTP(r'ftp.meteo.uni-bonn.de')
    ftp.login()
    ftp.cwd('/pub/reana/COSMO-REA2/TOT_PREC/' + str(y))
    grb_list = ftp.nlst()  # List files in folder
    os.makedirs(os.path.join(output_folder, str(y) + '_grib'))
    os.makedirs(os.path.join(output_folder, str(y) + '_netcdf'))
    for f in grb_list:
        wget.download(url + str(y) + '/' + f, os.path.join(output_folder, str(y)  + '_grib', f))
        ds = xr.open_dataset(os.path.join(output_folder, str(y) + '_grib', f), engine='cfgrib')

        time = ds.__getitem__('time')  # Timesteps --> for each hour a raster
        tp = ds.__getitem__('tp')
        steps = ds.__getitem__('step')