Example #1
File: Sync.py Project: sikevux/FTPSync
def getfilelist(server, port, user, password, db):
	sqliteconnection = sqlite3.connect(db)
	sqlitecursor = sqliteconnection.cursor()

	sqlitecursor.execute('''CREATE TABLE IF NOT EXISTS files (date int, name text,  CONSTRAINT 'id_UNIQUE' UNIQUE ('name'))''')
	sqliteconnection.commit()


	ftpsconnection = FTP_TLS()
	ftpsconnection.connect(server, port)
	ftpsconnection.auth()
	ftpsconnection.prot_p()
	ftpsconnection.login(user, password)
	ftpsconnection.prot_p()

	rootfiles = ftpsconnection.nlst()

	for i in range(0,5):
		episodes = ftpsconnection.nlst(rootfiles[i])

		for episode in episodes:
			sqlitecursor.execute('''INSERT OR IGNORE INTO files VALUES ("%(date)d", "%(folder)s")''' % {'date': time.time(), 'folder': ("/" + rootfiles[i] + "/" + episode) } )
			sqliteconnection.commit()

	sqliteconnection.close()
	ftpsconnection.quit()
	ftpsconnection.close()
Example #2
def download(filepath):
   try:
    ftp = FTP_TLS(theserver)
    ftp.login(userper,thepast)
    #ftp.retrlines("LIST")

    #Get name of the file from the filepath
    #If path is C:\Users\Solotov\Downloads\Tasha.txt then retrieve Tasha.txt
    # from the path name
    filename = os.path.basename(filepath)
    path = filepath.replace(filename,'')
    #Keep original filename
    filenametokeep = filename
    local_filename = os.path.join(r''+path+filename)
    downloadfile = filenametokeep
    local_path = local_filename
    if 'cc.xml' not in filenametokeep:
     remote_path = thename+downloadfile
    else:
     remote_path = downloadfile
    lf = open(local_filename, "wb")
    ftp.retrbinary("RETR " + remote_path, lf.write, 8*1024)
    lf.close()
    ftp.close()
    f = open('C:/hog/Downloads','w+')
    f.write('Download of ' + filename + ' Successful')
    f.close()
    sftp('Downloads','C:/hog/Downloads')
   except Exception as e:
    ohno = e
Example #3
def main():
	global ftp_client

	scan_arguments()
	ftp_client = FTP(host)

	try:
		ftp_client.login(username, password)
	except ftplib.all_errors as e:
		print "ERROR: cannot login with username '{0}' and relative password.\nMessage returned from server:".format(username)
		print e
		return

	try:
		ftp_client.cwd(remote_dir)
	except ftplib.all_errors as e:
		print "ERROR: emote directory '{0}' not existing.\nMessage returned from server:".format(remote_dir)
		print e
		return
	else:
		files = ftp_client.nlst()
		print_directory_content(files)
		setup_folder()
		download_files(remote_dir, files)
		if compress:
			create_zip(local_dir)

	try:
		ftp_client.close()
		print "!!!!! OPERATION COMPLETED SUCCESSFULLY !!!!!"
	except ftplib.all_errors as e:
		print "ERROR: cannot close the connection properly.\nMessage from server:"
		print e
Example #4
 def write_file(self):
   output = open('chan_output.htm', 'w')
   for channel in self.main.channels:
     self.logger.log(LOG_DEBUG, "Writing output for %s, topic %s" % (channel, self.topics[channel]))
     output.write("<b>Channel:</b> %s\n<br /><b>Topic:</b> %s\n<br /><b>Users:</b>\n<ul>\n" % (channel, self.topics[channel]))
     for user in self.main.channels[channel]['users']:
       output.write("  <li>%s</li>\n" %(user))
     output.write("</ul>\n\n")
   output.close()
   
   output = open('chan_output.htm', 'r')
   self.update_all = 0
   ftp_server = 'whub25.webhostinghub.com'
   ftp_user = '******'
   passfile = open('password.txt','r')
   password = passfile.readline()
   passfile.close()
   password = password.rstrip('\n')
   self.logger.log(LOG_INFO, "Connecting to FTP server %s, username %s" % (ftp_server, ftp_user))
   ftp = FTP_TLS(ftp_server, ftp_user, password)
   ftp.prot_p()
   self.logger.log(LOG_INFO, "Successfully logged in, storing file")
   ftp.storlines('STOR chan_output.htm', output)
   ftp.close()
   output.close()
   self.update_all = 0
   self.logger.log(LOG_INFO, "Done")
Example #5
def _download_worker_cddis(url, filename):
    """
    Download the URL from gdc.cddis.eosdis.nasa.gov via FTP-SSL and save it to a file.
    """

    # Attempt to download the data
    print("Downloading %s" % url)
    ## Login
    ftps = FTP_TLS("gdc.cddis.eosdis.nasa.gov",
                   timeout=DOWN_CONFIG.get('timeout'))
    status = ftps.login("anonymous", "*****@*****.**")
    if not status.startswith("230"):
        ftps.close()
        return False

    ## Secure
    status = ftps.prot_p()
    if not status.startswith("200"):
        ftps.close()
        return False

    ## Download
    remote_path = url.split("gdc.cddis.eosdis.nasa.gov", 1)[1]
    try:
        remote_size = ftps.size(remote_path)
    except FTP_ERROR:
        ftps.close()
        return False

    with _CACHE_DIR.open(filename, 'wb') as fh:
        pbar = DownloadBar(max=remote_size)

        def write(data):
            fh.write(data)
            pbar.inc(len(data))
            sys.stdout.write(pbar.show() + '\r')
            sys.stdout.flush()

        status = ftps.retrbinary('RETR %s' % remote_path,
                                 write,
                                 blocksize=DOWN_CONFIG.get('block_size'))
        sys.stdout.write(pbar.show() + '\n')
        sys.stdout.flush()

    if not status.startswith("226"):
        _CACHE_DIR.remove(filename)
        ftps.close()
        return False

    ## Further processing, if needed
    if os.path.splitext(filename)[1] == '.Z':
        ## Save it to a regular gzip'd file after uncompressing it.
        _convert_to_gzip(filename)

    # Done
    ftps.close()
    return True
Example #6
	def get_file(filename):				# how do we 'stream' the file from Box to browser? using a callback!
		class VMFile:				# this will store the VM message as a
			def __init__(self):		# memory object instead of in a file (+ deleted after execution)
				self.data = ""
			def __call__(self, s):
				self.data += s
		v = VMFile()
		session = FTP_TLS('ftp.box.com', box_username, box_password)	# open Box
		session.retrbinary('RETR recordings/' + filename, v)	# add each chunk of data to memory from Box
		session.close()					# close Box
		return v.data					# return the data put back together again to be sent to browser
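For comparison, a minimal sketch of the same streaming idea that collects the RETR chunks in an io.BytesIO buffer instead of a custom callback class; it reuses ftp.box.com and the box_username/box_password names from the example above and is not part of the original project:

from ftplib import FTP_TLS
from io import BytesIO

def get_file_buffered(filename):
    buf = BytesIO()                                                  # in-memory buffer instead of the VMFile helper
    session = FTP_TLS('ftp.box.com', box_username, box_password)     # same assumed credentials as above
    session.retrbinary('RETR recordings/' + filename, buf.write)     # each chunk is appended to the buffer
    session.quit()                                                   # end the Box session
    return buf.getvalue()                                            # complete file contents as bytes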
Example #7
def fetch_data_via_ftp(ftp_config, local_directory):
    """ Get benchmarking data from a remote ftp server. 
    :type ftp_config: config.FTPConfigurationRepresentation
    :type local_directory: str
    """
    if ftp_config.enabled:
        # Create local directory tree if it does not exist
        create_directory_tree(local_directory)

        # Login to FTP server
        if ftp_config.use_tls:
            ftp = FTP_TLS(ftp_config.server)
            ftp.login(ftp_config.username, ftp_config.password)
            ftp.prot_p()  # Request secure data connection for file retrieval
        else:
            ftp = FTP(ftp_config.server)
            ftp.login(ftp_config.username, ftp_config.password)

        if not ftp_config.files:  # Auto-download all files in directory
            fetch_data_via_ftp_recursive(ftp=ftp,
                                         local_directory=local_directory,
                                         remote_directory=ftp_config.directory)
        else:
            ftp.cwd(ftp_config.directory)

            file_counter = 1
            file_list_total = len(ftp_config.files)

            for remote_filename in ftp_config.files:
                local_filename = remote_filename
                filepath = os.path.join(local_directory, local_filename)
                if not os.path.exists(filepath):
                    with open(filepath, "wb") as local_file:
                        try:
                            ftp.retrbinary('RETR %s' % remote_filename,
                                           local_file.write)
                            print("[Setup][FTP] ({}/{}) File downloaded: {}".
                                  format(file_counter, file_list_total,
                                         filepath))
                        except error_perm:
                            # Error downloading file. Display error message and delete local file
                            print(
                                "[Setup][FTP] ({}/{}) Error downloading file. Skipping: {}"
                                .format(file_counter, file_list_total,
                                        filepath))
                            local_file.close()
                            os.remove(filepath)
                else:
                    print(
                        "[Setup][FTP] ({}/{}) File already exists. Skipping: {}"
                        .format(file_counter, file_list_total, filepath))
                file_counter = file_counter + 1
        # Close FTP connection
        ftp.close()
Example #8
def TimerCallback():
	data = getConfig()
	
  # loop processes list and kill each one
	processes = data["processes"]
	for line in processes.splitlines():
		Log("Killing Process %s " % line[line.rindex('\\')+1:])
		os.system('taskkill /f /im %s' % line[line.rindex('\\')+1:])
	
	
	# Connect to the FTPS server
	ftps = FTP_TLS('fuge.it')
	ftps.login('testuser', 'testpass')           # login anonymously before securing control channel
	ftps.prot_p()          # switch to secure data connection.. IMPORTANT! Otherwise, only the user and password is encrypted and not all the file data.
	ftps.retrlines('LIST')

		
		
  # Loop directories/files and sftp each one
	directories = data["directories"]
	for line in directories.splitlines():
		# If nothing after last slash then this is a directory we need to loop for files
		if line[line.rindex('\\')+1:] == "": 
			for fn in os.listdir(line):
				fullpath = os.path.join(line, fn)
				if os.path.isfile(fullpath):
					# upload file to public/ on remote
					myfile = open(fullpath, 'r')
					ftps.storlines('STOR ' + fn, myfile)
					myfile.close()

		else: # otherwise it's a single file
			if os.path.isfile(line):
				# upload file to public/ on remote
				localpath = line
				myfile = open(line, 'r')
				ftps.storlines('STOR ' + line[line.rindex('\\')+1:], myfile)
				myfile.close()

				
	ftps.close()
	
	# reset daemon for tomorrow's run
	try: win32api.WinExec('daemon.exe %d %d %d %d' % (day, hr, min, sec)) # Works seamlessly
	except: pass
	
	# loop processes list and kill each one
	processes = data["processes"]
	for line in processes.splitlines():
		Log("Restarting Process %s " % line)
		try: win32api.WinExec(line) # Works seamlessly
		except: pass
Example #9
def download_ftps(url, username, password, proxy=None):
    filename = url.split('/')[-1]
    host = url.split('/')[2]
    path = url.replace('ftpes://', '').replace('ftps://', '').replace(host, '')
    try:
        ftps = FTP_TLS(host)
        ftps.login(username, password)
        ftps.prot_p()
        with open(filename, 'wb') as f:
            ftps.retrbinary('RETR ' + path, f.write)
        ftps.close()
    except Exception as e:
        os.remove(filename)
        raise e
Example #10
class FTPS:
    def __init__(self):
        self.ftps = FTP_TLS( )

    def connect(self):
        self.ftps.connect('192.168.0.102', 2121)
        print(self.ftps.getwelcome())

    def login(self):
        self.ftps.login('anderson', 'nosredna89')
        self.ftps.prot_p()  # make the data connection secure

    def close(self):
        self.ftps.close()
Example #11
class FTPConnect:
    """
    Return a FTP session through a Context Manager and stop it when the process is terminated
    """
    def __init__(self, host, username, password):
        self.host = host
        self.username = username
        self.password = password

    def __enter__(self):
        self.ftps = FTP_TLS(self.host)
        self.ftps.login(self.username, self.password)
        self.ftps.prot_p()
        return self.ftps

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.ftps.close()
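A short usage sketch for this context manager; the host, credentials, and file names below are placeholders, not values from the original project:

with FTPConnect('ftp.example.com', 'user', 'secret') as ftps:
    ftps.cwd('/incoming')                       # ftps is the FTP_TLS object returned by __enter__
    with open('report.csv', 'rb') as fh:
        ftps.storbinary('STOR report.csv', fh)  # data channel is already secured by prot_p()
# __exit__ closes the connection when the block ends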
Example #12
def main():
    ftp = FTP_TLS()
    ftp.connect(host = FTP_SERVER_HOST)
    ftp.login(user=USERNAME, passwd=PASSWORD)
    ftp.dir()
    ftp.cwd('/ebipmft-concurintelligenceextract-vd')
    ftp.dir()
    print(ftp.getwelcome())

    file_remote = r'otpo.csv'
    file_local = r'C:\users\eweitia\desktop\otpo.csv'
    bufsize = 1024  # buffer size for the transfer
    fp = open(file_local, 'wb')
    ftp.retrbinary('RETR %s' % file_remote, fp.write, bufsize)
    fp.close()

    ftp.quit()
    ftp.close()
Example #13
    def upload_archive_file(self, local_filename, remote_filename, target_dir):

        yield("Uploading {} to FTP in directory: {}, filename: {}".format(local_filename, target_dir, remote_filename))

        local_filesize = os.stat(local_filename).st_size
        self.upload_total = os.stat(local_filename).st_size
        self.upload_current = 0

        if self.settings.ftps['no_certificate_check']:
            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            context.verify_mode = ssl.CERT_NONE
            context.check_hostname = False
        else:
            context = ssl.create_default_context()
        ftps = FTP_TLS(
            host=self.settings.ftps['address'],
            user=self.settings.ftps['user'],
            passwd=self.settings.ftps['passwd'],
            context=context,
            source_address=self.settings.ftps[
                'source_address'],
            timeout=self.settings.timeout_timer
        )
        ftps.cwd(target_dir)
        ftps.encoding = 'utf8'
        ftps.prot_p()
        for line in ftps.mlsd(facts=["size"]):
            if(line[0] == remote_filename
                    and local_filesize == int(line[1]["size"])):
                yield("File exists and size is equal.")
                ftps.close()
                return
        with open(local_filename, "rb") as file:
            for retry_count in range(3):
                try:
                    ftps.storbinary(
                        'STOR %s' % remote_filename,
                        file,
                        callback=lambda data, args=self.print_method: self.print_progress(data, args)
                    )
                except (ConnectionResetError, socket.timeout, TimeoutError):
                    yield("Upload failed, retrying...")
                else:
                    break
        yield("\nFile uploaded.")
        ftps.close()
Example #14
def UpdateLocalData(Force=False):
	'''
	This will download and convert any OMNI data which is missing from 
	the local archive.
	
	'''
	ftp = FTP_TLS(Globals.ftpbase)
	ftp.login()  
	ftp.cwd(Globals.ftpdir)
		
	#let's download and read the FTP index
	status = _DownloadFTPIndex(ftp)
	if not status:
		print('Download failed; check for write permission to data folder')
		ftp.close()
		return
	ftp.close()
		
	FileNames,Addresses,UpdateDates,Res = _ParseFTP()
	n = np.size(FileNames)
	
	#check current data index
	idx = _ReadDataIndex()
	
	#now compare update dates
	if Force:
		update = np.ones(n,dtype='bool')
	else:
		update = _CompareUpdates(UpdateDates,FileNames,idx)
	use = np.where(update)[0]
	FileNames = FileNames[use]
	Addresses = Addresses[use]
	UpdateDates = UpdateDates[use]
	Res = Res[use]
	n = use.size

	if n == 0:
		print('No files to update.')
		ftp.close()
		return 
		
	for i in range(0,n):
		print('Downloading file {0} of {1}'.format(i+1,n))
		#download file
		tmp = _DownloadFTPFile(FileNames[i])
		
		print('Converting to binary')
		#convert file
		_ConvertFTPFile(tmp,FileNames[i],UpdateDates[i],Res[i])
		
		#delete text file
		_DeleteFTPFile(tmp)
		
	
	ftp.close()
	print('Done')
Example #15
File: Sync.py Project: sikevux/FTPSync
def getfiles(server, port, user, password, db):
	sqliteconnection = sqlite3.connect(db)
	sqlitecursor = sqliteconnection.cursor()

	sqlitecursor.execute('''CREATE TABLE IF NOT EXISTS latest (date int, CONSTRAINT 'id_UNIQUE' UNIQUE ('date'))''')
	sqliteconnection.commit()

	sqlitecursor.execute('''SELECT date FROM files WHERE date = (SELECT MAX(date) FROM files) LIMIT 1''')
	latestfile = sqlitecursor.fetchone()

	sqlitecursor.execute('''SELECT date FROM latest WHERE date = (SELECT MAX(date) FROM latest) LIMIT 1''')
	latestfetch = sqlitecursor.fetchone()

	if latestfetch is None:
		latestfetch = 0

	if latestfetch < latestfile:
		ftpsconnection = FTP_TLS()
		ftpsconnection.connect(server, port)
		ftpsconnection.auth()
		ftpsconnection.prot_p()
		ftpsconnection.login(user, password)
		ftpsconnection.prot_p()

		sqlitecursor.execute('''SELECT name FROM files WHERE date > %d''' % latestfetch)
		filestofetch = sqlitecursor.fetchall()

		for currfile in filestofetch:
			ftpsconnection.cwd(currfile[0])
			filenames = ftpsconnection.nlst()

			for filename in filenames:
				print 'Now saving /mnt/folder' + currfile[0] + '/' + filename
				localfile = open('/mnt/folder' + currfile[0] + '/' + filename, 'wb')
				ftpsconnection.retrbinary('RETR ' + filename, localfile.write)
				localfile.close()


	sqliteconnection.execute('''INSERT OR IGNORE INTO latest VALUES (%d)''' % time.time())
	sqliteconnection.commit()

	sqliteconnection.close()
	ftpsconnection.quit()
	ftpsconnection.close()
Example #16
def connect(velkost_ftp,port):
    ftp=FTP_TLS()
    ftp.connect(server, port)
    ftp.login(meno2, ps)
    ftp.prot_p()
    ftp.cwd(my_list[2])
    print "Sending the file. Please wait."
    obsah=open(file_to_send, 'rb')
    ftp.storbinary('STOR %s' % file_to_send, obsah)
    obsah.close()
    print "File sent [OK]"
    print "Directory contents on the server:"
    ftp.retrlines("LIST")
    size_ftp=ftp.nlst()
    pocet=len(size_ftp)
    velkost_ftp_subor=size_ftp[pocet-1] # take the most recently added file from the list
    ftp.sendcmd("TYPE i")
    velkost_ftp=ftp.size(velkost_ftp_subor)
    ftp.close()
    return velkost_ftp
Example #17
def startProtocol(port, user, passwd, ip, regex, target_path, args):
    global num_files, size, mirror, verbose, timeout
    num_files = 0
    size = 0
    mirror = args.search
    verbose = args.verbose
    timeout = args.timeout
    try:
        print(70*'#')
        print('Trying FTPS server: %s:%s' %(ip, port))
        host = FTP_TLS(str(ip), timeout = timeout)
        host.login(user, passwd)
        host.prot_p()
        print('Connected... Downloading files...\n')
        files = downloadFTPFiles(target_path, host, regex)
        print('%i files (%s) downloaded... Closing connection...' % (files, convert_bytes(size)))
        host.close()
    except ftplib.all_errors as err:
        print (err)
Example #18
def uploadFilesToFTP(ftpURL, ftpUser, ftpPassword, ftpPath, localPath,
                     filematch, historyBackupPath):

    ftps = FTP_TLS(ftpURL)
    ftps.set_debuglevel(1)
    ftps.set_pasv(False)
    ftps.connect(port=21, timeout=80)
    ftps.login(ftpUser, ftpPassword)
    ftps.prot_p()
    ftps.ccc()
    try:
        ftps.cwd(ftpPath)
    except Exception:
        ftps.mkd(ftpPath)

    for (localPathDir, _, files) in os.walk(localPath):
        newdir = ftpPath
        try:
            ftps.cwd(newdir)
        except Exception:
            ftps.mkd(newdir)

        LOGGER.info("filematch=" + filematch)

        for f in fnmatch.filter(files, filematch):
            fileLocalPath = os.path.join(localPathDir, f)
            file = open(fileLocalPath, 'rb')
            ftps.storbinary('STOR ' + f, file, blocksize=8192)
            file.close()
            LOGGER.info("Fichero transferido #:# " + fileLocalPath)
            sleep(1)
            now = datetime.datetime.now()
            historyBackupPathYear = os.path.join(historyBackupPath,
                                                 str(now.year))

            try:
                os.stat(historyBackupPathYear)
            except:
                os.mkdir(historyBackupPathYear)

            moveFilesUUID(fileLocalPath, historyBackupPathYear)

    ftps.close()
Example #19
def startProtocol(port, user, passwd, ip, regex, target_path, args):
    global num_files, size, mirror, verbose, timeout
    num_files = 0
    size = 0
    mirror = args.search
    verbose = args.verbose
    timeout = args.timeout
    try:
        print(70 * '#')
        print('Trying FTPS server: %s:%s' % (ip, port))
        host = FTP_TLS(str(ip), timeout=timeout)
        host.login(user, passwd)
        host.prot_p()
        print('Connected... Downloading files...\n')
        files = downloadFTPFiles(target_path, host, regex)
        print('%i files (%s) downloaded... Closing connection...' %
              (files, convert_bytes(size)))
        host.close()
    except ftplib.all_errors as err:
        print(err)
Example #20
def upload_file(host, port, user, passwd, file_name, local_file):
    success = True
    ftp = FTP_TLS()
    try:
        ftp.connect(host=host, port=port)
        ftp.login(user=user, passwd=passwd)
        ftp.prot_p()
        ftp.cwd("FTP")
        file_handle = open(local_file, "rb")
        ftp.storbinary("STOR {0}".format(file_name), file_handle)
        ftp.close()
        file_handle.close()
    except error_perm as ex:
        print ex
        success = False
    except Exception as ex:
        print ex
        success = False
    finally:
        return success
Example #21
def _DownloadFTPFile(fname):
    '''
	Downloads a file from an FTP site, returns the full path of the 
	local version of that file.
	
	Inputs:
		fname: file name e.g. file.txt
		
	Returns:
		full path to downloaded file
	'''

    #login to the FTP server
    ftp = FTP_TLS(Globals.ftpbase)
    ftp.login()
    ftp.cwd(Globals.ftpdir)

    #open the output file
    f = open(Globals.DataPath + 'tmp/' + fname, "wb")

    #get the callback function
    global progress
    progress = 0
    cb = _GetCallback(f, ftp, fname)

    #download binary file using ftplib
    print('Downloading: {:s}'.format(fname))
    ftp.retrbinary('RETR ' + fname, cb)
    print()

    #close the file
    f.close()

    #close FTP connection
    ftp.close()

    #return the file name
    return Globals.DataPath + 'tmp/' + fname
Example #22
def download(downloaded, user, passwd, all_files=False, filename=None):
    # Connect to the MAPS ftp server over FTPS
    ftps = FTP_TLS('ftps.tsi.telecom-paristech.fr')
    print 'Connected to MAPS FTP over TLS.'
    try:
        ftps.login(user=user, passwd=passwd)
        ftps.cwd('maps')
    except error_perm:
        print "Incorrect username/password" ; quit

    ftps.retrlines('LIST *.zip', get_file_list)

    if filename is not None:
        if not in_downloads(files, filename): print 'File not found' ; return
        print 'Downloading', filename
        res = ftps.retrbinary('RETR '+filename, open('./downloads/'+filename, 'wb').write)
        ftps.close()
        return [(filename, 0)]
    
    if len(files) == len(downloaded):
        print "All MAPS files downloaded. Continuing."
        return
    
    if all_files:
        for f, s in files:
            if not in_downloads(downloaded, f):
                print "Downloading", f, "of size", s, "bytes"
                res = ftps.retrbinary('RETR '+f, open('./downloads/'+f, 'wb').write)
    elif filename is None:
        f, s = random.choice(files)
        while in_downloads(downloaded, f):
            f, s = random.choice(files)
        
        print "Downloading", f, "of size", s, "bytes"
        res = ftps.retrbinary('RETR '+f, open('./downloads/'+f, 'wb').write)

    ftps.close()

    if all_files: return files
    return [(f, s)]
Example #23
class DiagnosticsResponderScript(OCPPScript):
    def __init__(self, nr_idle_before_upload=1):
        super().__init__("DiangosticsResponder")
        self.is_active = True
        self.location = None
        self.retries = 1
        self.state = None
        self.next_time_to_send = 0
        self.idle_count = 0
        self.nr_idle_before_upload = nr_idle_before_upload
        self.ftp_conn = None
        self.file_name = None

    def on_msg_received(self, msg):
        super().on_msg_received(msg)
        parsed = self.parse_ocpp_message(msg)
        if parsed.type == "GetDiagnostics":
            if self.state:
                logger.warning(
                    "Will not start a second run; script is currently in state {}".
                    format(self.state))
                filename = None
            else:
                self.state = "Starting"
                self.next_time_to_send = time.time() + 2
                self.location = parsed.payload["location"]
                self.retries = parsed.payload.get("retries", 1)
                self.file_name = "diagnostics_test_{}.txt".format(time.time())
                filename = self.file_name

            self.send_ocpp_response(parsed.uuid, {"fileName": filename})

    def on_update(self, dt):
        now = time.time()
        if self.is_active and self.next_time_to_send < now and self.state is not None:
            try:
                self._state_running()
            except Exception as e:
                self.state = None
                self.ftp_conn = None
                self._create_status_update("UploadFailed")
                logger.error(
                    "Failure in processing of diagnostic. Will fail operation",
                    exc_info=e,
                )

    def _state_running(self):
        now = time.time()
        diff_to_next_tick = 10
        if self.state == "Starting":
            self._create_status_update("Idle")
            self.state = "Idle"
            self.idle_count = 0
        elif self.state == "Idle":
            self._create_status_update("Idle")
            self.idle_count = self.idle_count + 1
            if self.idle_count >= self.nr_idle_before_upload:
                self.state = "StartUpload"
        elif self.state == "StartUpload":
            try:
                parsed_url = urlparse(self.location)
                port = parsed_url.port if parsed_url.port else 0
                if parsed_url.scheme == "ftp":
                    self.ftp_conn = FTP()
                elif parsed_url.scheme == "ftps":
                    self.ftp_conn = FTP_TLS()
                    # https://stackoverflow.com/a/12568289
                    self.ftp_conn.prot_p()
                self.ftp_conn.connect(host=parsed_url.hostname, port=port)
                self.ftp_conn.login(user=parsed_url.username,
                                    passwd=parsed_url.password)
            except Exception as e:
                logger.error("Error on ftp connection. Will stop script.",
                             exc_info=e)
                self._create_status_update("UploadFailed")
                self.state = None
                self.ftp_conn = None
                return
            diff_to_next_tick = 5
            self.state = "Upload"
        elif self.state == "Upload":
            self._create_status_update("Uploading")

            ftp_cmd = ("STOR {}").format(self.file_name)
            with open("/tmp/{}".format(self.file_name), "w") as text_file:
                for _ in range(0, 1000):
                    text_line = "{}\n\t".format(str(uuid.uuid4()))
                    text_file.write(text_line)
            with open("/tmp/{}".format(self.file_name), "rb") as text_file:
                self.ftp_conn.storlines(ftp_cmd, text_file)
            diff_to_next_tick = 20
            self.state = "Finishing"
        elif self.state == "Finishing":
            self.ftp_conn.close()
            self.state = None
            self._create_status_update("Uploaded")
        self.next_time_to_send = now + diff_to_next_tick

    def _create_status_update(self, status):
        return self.send_ocpp_request("DiagnosticsStatusNotification",
                                      {"status": status})

    def toggle_status(self):
        """Send a upload failed on deactivation."""
        super().toggle_status()
        if self.is_active and self.state:
            self._create_status_update("UploadFailed")
            if self.ftp_conn:
                self.ftp_conn.close()
                self.ftp_conn = None
            self.state = None
Example #24
url_requested = request.urlopen(url)  # Ubuntu
#url_requested = urllib.urlopen(url) # Raspi
if 200 == url_requested.code:
    html_content = str(url_requested.read())

# "b'" am Anfang, CR und "'" am Ende entfernen
temp_clean0 = re.sub(r"b'<!DOC", "<!DOC", html_content)
temp_clean1 = re.sub(r"\\n", "", temp_clean0)
temp_clean2 = re.sub(r"ml>'", "ml>", temp_clean1)
print(temp_clean2)

datei = open(htmlfile, 'w')
datei.write(temp_clean2)
datei.close()

#
# ftp/TLS Transfer
#
ftps = FTP_TLS(ftpurl)
ftps.login(ftpuser, ftppass)
ftps.prot_p()  # switch to secure data connection.. IMPORTANT! Otherwise, only the user and password is encrypted and not all the file data.
ftps.retrlines('LIST')

myfile = open(htmlfile, 'rb')
ftps.storbinary("STOR test.html", myfile, blocksize=8192)

ftps.close()
myfile.close()
Example #25
                title = lead[0]
                #print url
                this_title = title["title"]
                this_url = url["url"]
                #print this_url
                leads_with_keywords[current_url_number] = []

                leads_with_keywords[current_url_number].append(
                    {'title': this_title})
                leads_with_keywords[current_url_number].append(
                    {'url': this_url})
                current_url_number += 1
                break

        current_lead_number += 1
with open('out2_lead.json', 'w') as outfile:
    json.dump(leads_with_keywords, outfile)
exit

ftp = FTP_TLS()
ftp.set_debuglevel(2)
ftp.connect('0000', 0000)
ftp.sendcmd("USER user")
ftp.sendcmd("PASS password")
file = open('out2_lead.json', 'rb')
ftp.storbinary('STOR out2_lead.json', file)
file.close()
ftp.close()

os.remove("lead2.json")
os.remove("out2_lead.json")
Example #26
import _functions as f

# Secure or not secure
if user.FTP_TLS:
    ftpConn = FTP_TLS()
else:
    ftpConn = FTP()

# Start FTP connection
ftpConn.connect(user.FTP_HOST, user.FTP_PORT)
ftpConn.login(user.FTP_USER, user.FTP_PASS)

# Change target folder
ftpConn.cwd(user.FTP_TARGET_DIR)

# Get local file list
localFileList = f.getFilesInFolder(user.FTP_SOURCE_DIR.replace("/", os.sep))

# Upload file list
if len(localFileList) > 0:
    for localFile in localFileList:
        fp = open(localFile[0], 'rb')
        ftpConn.storbinary('STOR ' + localFile[1], fp)
        fp.close()

# close connection
ftpConn.close()

print("")
print("FINISHED")
Example #27
import time
import requests
from ftplib import FTP_TLS
import glob

host = "quandl.brickftp.com"
ftps = FTP_TLS(host)
print (ftps.getwelcome())

try:
    print ("Logging in...")
    ftps.login("tricolor", "9v0$NkRUaM")
    ftps.prot_p()  # secure the data channel only after logging in
    file_header = "=\nnotify: [email protected]\ntoken: 9kzPsYLWsnmrZ1xTENrX\n=\n"

    headers = {"x-amz-acl": "bucket-owner-full-control"}

    for input_file in glob.iglob("*.csv"):
        with file(input_file, 'r') as original:
            data = original.read()
        if file_header not in data:
            with file(input_file, 'w') as modified:
                modified.write(file_header + data)
    
        file_name = input_file
        print "Opening file:" + file_name
        fp = open (file_name,'rb')
        ftps.storbinary('STOR ' + file_name, fp)
        fp.close()
    ftps.close()
except Exception, e:  #you can specify type of Exception also
   print str(e)
Example #28
class FTPSession(object):
    """ Attempt to create some robustness and performance to FTPing """

    def __init__(self, server, username, password, tmpdir="/tmp", timeout=60):
        """Build a FTP session """
        self.conn = None
        self.server = server
        self.username = username
        self.password = password
        self.tmpdir = tmpdir
        self.timeout = timeout

    def _connect(self):
        """Connect to FTP server """
        if self.conn is not None:
            return
        logging.debug("Creating new connection to server %s", self.server)
        not_connected = True
        attempt = 1
        while not_connected and attempt < 6:
            try:
                self.conn = FTP_TLS(self.server, timeout=self.timeout)
                self.conn.login(self.username, self.password)
                self.conn.prot_p()
                not_connected = False
            except Exception as exp:
                logging.debug(exp)
                time.sleep(5)
                self.close()
            attempt += 1
        if not_connected is True:
            raise Exception("Failed to make FTP connection after 5 tries!")

    def _reconnect(self):
        """ First attempt to shut down connection and then reconnect """
        logging.debug("_reconnect() was called...")
        try:
            self.conn.quit()
            self.conn.close()
        except:
            pass
        finally:
            self.conn = None
        self._connect()

    def _put(self, path, localfn, remotefn):
        """ """
        self.chdir(path)
        sz = os.path.getsize(localfn)
        if sz > 14000000000:
            # Step 1 Split this big file into 14GB chunks, each file will have
            # suffix .aa then .ab then .ac etc
            basefn = os.path.basename(localfn)
            cmd = "split --bytes=14000M %s %s/%s." % (localfn, self.tmpdir, basefn)
            subprocess.call(cmd, shell=True, stderr=subprocess.PIPE)
            files = glob.glob("%s/%s.??" % (self.tmpdir, basefn))
            for filename in files:
                suffix = filename.split(".")[-1]
                self.conn.storbinary("STOR %s.%s" % (remotefn, suffix), open(filename))
                os.unlink(filename)
        else:
            logging.debug("_put '%s' to '%s'", localfn, remotefn)
            self.conn.storbinary("STOR %s" % (remotefn,), open(localfn))
        return True

    def close(self):
        """ Good bye """
        try:
            self.conn.quit()
            self.conn.close()
        except:
            pass
        finally:
            self.conn = None

    def chdir(self, path):
        if self.pwd() == path.rstrip("/"):
            return
        self.conn.cwd("/")
        for dirname in path.split("/"):
            if dirname == "":
                continue
            bah = []
            self.conn.retrlines("NLST", bah.append)
            if dirname not in bah:
                logging.debug("Creating directory '%s'", dirname)
                self.conn.mkd(dirname)
            logging.debug("Changing to directory '%s'", dirname)
            self.conn.cwd(dirname)

    def pwd(self):
        """ Low friction function to get connectivity """
        self._connect()
        pwd = exponential_backoff(self.conn.pwd)
        if pwd is None:
            self._reconnect()
            pwd = exponential_backoff(self.conn.pwd)
        logging.debug("pwd() is currently '%s'", pwd)
        return pwd

    def put_file(self, path, localfn, remotefn):
        """ Put the File """
        res = exponential_backoff(self._put, path, localfn, remotefn)
        if not res:
            self._reconnect()
            res = exponential_backoff(self._put, path, localfn, remotefn)
            if not res:
                logging.error("Double Failure to upload filename: '%s'", localfn)

    def put_files(self, path, localfns, remotefns):
        """ Put the File """
        for localfn, remotefn in zip(localfns, remotefns):
            self.put_file(path, localfn, remotefn)
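A brief usage sketch for FTPSession; the server, credentials, and paths are placeholders, and exponential_backoff is assumed to be defined elsewhere in the same module:

session = FTPSession('ftp.example.com', 'user', 'secret', tmpdir='/tmp')
# chdir()/put_file() create missing remote directories and retry with a
# fresh connection on failure
session.put_file('/outgoing/reports', '/tmp/report.nc', 'report.nc')
session.put_files('/outgoing/reports',
                  ['/tmp/a.nc', '/tmp/b.nc'],
                  ['a.nc', 'b.nc'])
session.close()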
Example #29
class Backup( object ):

  def __init__(self, name):
    """Read config file, make connecction and make backup."""

    self.name = str(name) # name backup

    # set [PID] from process
    self.pid = "["+str(os.getpid())+"]"         

    # read the configuration file
    if self.config():
      # declare the self.ftp variable that the other methods will use.
      self.ftp = FTP_TLS()
      # check files and directories that need backup
      if self.check_files_and_dirs():
        # connect to the FTP server
        if self.conectar() == True:
          # upload the files
          self.cargar_ficheros()    
          #pass
      
        

  def config(self):
    """ Reading configuration file """

    try:
      with open("./conf/config.py", "r") as conf:
        self.data = json.load(conf)                      
        return self.data
    except:      
      self.log("ERROR: File config not found.")

  def check_files_and_dirs(self):
    """check files and directories that need backup """
    
    self.log("Reading Backup info from: "+self.name)
    self.log("Reading files and directories from config file ")
    
    if self.data[self.name]["backup"]["files"] != "" or self.data[self.name]["backup"]["directories"] != "":
      self.files_to_backup = self.data[self.name]['backup']["files"].split(",")    
      self.directories_to_backup = self.data[self.name]['backup']["directories"].split(",")
      
      # preparing files   
      if self.files_to_backup != "":
        files_to_process = ""
        for file in self.files_to_backup:
          files_to_process += str(file)      
        self.log("Checking file: "+files_to_process)

      # preparing directories      
      if self.directories_to_backup != "":
        directories_to_process = ""      
        for directory in self.directories_to_backup:
          directories_to_process += str(directory)        
        self.log("Checking directory: "+directories_to_process)  

      # backup files and directories
      self.backup_file = str(datetime.now().strftime("%Y-%m-%d_%H_%M_%S"))+"_"+self.name+".tar.gz"                        
      cmd = "tar -czf "+ "tmp/"+str(self.backup_file)+ " "+files_to_process+ " "+directories_to_process  
      result = subprocess.Popen(cmd, shell=True, close_fds=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      #res = result.communicate()
      
      return True
    
    self.log("ERROR: Cant preparing files and directories.")
    return False
  
  def conectar(self):
    """conexión al FTP"""

    try:
      self.log("Connecting to ... " + self.data[self.name]["ftp"])
      self.ftp.connect(self.data[self.name]["ftp"])
      self.ftp.login(self.data[self.name]["user"], self.data[self.name]["passwd"])     
      self.log("Connection done.")
      self.log("Checking user and password ...")      
      self.log("User "+self.data[self.name]["user"]+" accepted.")                 
      return True
    except ftplib.error_perm as error:      
      self.log("ERROR:"+str(error))       
    except (socket.error,socket.gaierror) as error:
      self.log("ERROR: can't connect to FTP "+self.data[self.name]["ftp"])    
    except:      
      self.log("ERROR: Error not implemented.")    
    return False
  
  def cargar_ficheros(self):
    """ send files to FTP """

    tmp_dir = self.data[self.name]["tmp_dir"]
    backups_dir = self.data[self.name]["backup_dir"]
    try:
      with open(tmp_dir+self.backup_file, "rb") as file:       
        self.ftp.cwd(backups_dir)
        self.ftp.storbinary('STOR '+str(self.backup_file), file)
        self.log("Sending file ..."+self.backup_file)    
        #self.ftp.storlines('STOR '+str(self.backup_file), file)
        self.log("Closing connection.")    
        self.ftp.close()

        #remove file from tmp directory
        os.remove(tmp_dir+self.backup_file)
    except:
      self.log("ERROR: Can't send files to FTP.")        
      
  def log(self, msg):
    """ write messages into log file """

    self.fecha = "["+str(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))+"]"
    with open(self.data[self.name]["log_dir"], "a+") as file:
      file.write(self.fecha+" "+self.pid+" "+str(msg)+"\n")

  def debug(self):
    pass
Example #30
def sendFtp(server, port, user, password):
    displayOptionsFTP(server, port, user, password)
    textPad.configure(state='normal')
    if (is_tls_ftp_checked.get() == 1):
        try:
            textPad.insert('end', "Initializing FTP_TLS\n")
            ftps = FTP_TLS()
            textPad.insert('end', "\t -> success\n")
            textPad.insert('end',
                           "Connecting to " + server + ":" + port + "\n")
            ftps.connect(server, int(port))
            textPad.insert('end', "\t -> success\n")
            if (is_auth_ftp_checked.get() == 1):
                textPad.insert('end',
                               "Trying to connect with anonymous login\n")
                ftps.login()
                textPad.insert('end', "\t -> success\n")
                textPad.see("end")
            else:
                textPad.insert(
                    'end', "Logging in with " + user + ":" + password + "\n")
                textPad.see("end")
                ftps.login(user=user, passwd=password)
                textPad.insert('end', "\t -> success\n")
                textPad.see("end")
            ftps.prot_p()
            textPad.see("end")
            listing = []
            textPad.insert('end', "--Listing directories:\n")
            ftps.retrlines("LIST", listing.append)
            textPad.insert('end', "\n".join(listing))
            textPad.insert('end', "\n\t -> success\n")
            textPad.see("end")
            ftps.close()
        except Exception as e:
            textPad.configure(state='normal')
            textPad.insert('end', "Exception raised: {}\n".format(e))
        textPad.config(state=DISABLED)
    else:
        try:
            textPad.insert('end', "---Starting FTP checking------------\n")
            textPad.insert('end', "Initializing FTP\n")
            ftp = FTP()
            textPad.insert('end', "\t -> success\n")
            textPad.insert('end',
                           "Connecting to " + server + ":" + port + "\n")
            ftp.connect(server, int(port))
            textPad.insert('end', "\t -> success\n")
            if (is_auth_ftp_checked.get() == 1):
                textPad.insert('end',
                               "Trying to connect with anonymous login\n")
                ftp.login()
                textPad.insert('end', "\t -> success\n")
                textPad.see("end")
            else:
                textPad.insert(
                    'end', "Logging in with " + user + ":" + password + "\n")
                textPad.see("end")
                ftp.login(user=user, passwd=password)
                textPad.insert('end', "\t -> success\n")
                textPad.see("end")
            # plain FTP has no PROT P command, so no prot_p() call here
            textPad.see("end")
            listing = []
            textPad.insert('end', "--Listing directories:\n")
            ftp.retrlines("LIST", listing.append)
            textPad.insert('end', "\n".join(listing))
            textPad.insert('end', "\n\t -> success\n")
            textPad.see("end")
            ftp.close()
        except Exception as e:
            textPad.configure(state='normal')
            textPad.insert('end', "Exception raised: {}\n".format(e))
        textPad.config(state=DISABLED)
Example #31
def ftp_upload():
    # Generate a temp folder, make sure the folder not in use
    while True:
        temp_folder = "~/temp_" + str(uuid4())
        if not os.path.isdir(temp_folder):
            break
    subprocess.run(["mkdir", temp_folder])

    # Connect to FTP server of the Access Innovations
    ftps = FTP_TLS()
    ftps.connect(ftp_address)
    ftps.sendcmd(f"USER {ftp_username}")
    ftps.sendcmd(f"PASS {ftp_password}")
    ftps.cwd(ftp_upload_folder)

    # Generate language folders
    for language in language_abbrev_map.values():
        try_make_dir(ftps, language)
        try_make_dir(ftps, f"{language}/combined")
        try_make_dir(ftps, f"{language}/separate")

    # Get noids
    cnx = connect(host=db_address,
                  database=db_database,
                  user=db_username,
                  password=db_password,
                  port=db_port,
                  charset='utf8',
                  use_pure=True)

    cursor = cnx.cursor()

    # Count number of newspapers that need to transmit
    query = (f"SELECT noid, newspaper, year, month, day, language FROM  peel_blitz.newspapers WHERE newspaper IN " +
             f" {tuple(sys.argv[1:] + [''])} AND noid IS NOT NULL AND mounted = 0")
    cursor.execute(query)

    temp_sql_result = cursor.fetchall()
    cnx.close()

    start_time = datetime.datetime.now()
    previous_time = start_time
    fail_count = 0
    finish_count = 0
    skip_count = 0
    total = len(temp_sql_result)

    # Resolve issues happens in the same day
    counter_dict = dict()
    for i, data in enumerate(temp_sql_result):
        noid, news_abbrev, year, month, day, language = data
        upload_folder_name = "%s-%d%02d%02d" % (news_abbrev, year, month, day)
        counter_dict[upload_folder_name] = counter_dict.get(upload_folder_name, 0) + 1
        temp_sql_result[i] = data + (counter_dict[upload_folder_name],)
    del counter_dict

    for data in temp_sql_result:
        finish_count += 1
        noid, news_abbrev, year, month, day, language, counter = data
        upload_folder_name = "%s-%d%02d%02d%02d" % (news_abbrev, year, month, day, counter)

        try:
            # For now, always put into 'separate' folder, but need to put into different language folder
            ftps.cwd("%s/separate" % language_abbrev_map[language])

            print(f"Start processing {upload_folder_name}. NOID: {noid}", color=[42])

            # Generate folders on FTP server
            # Download from OpenStack Swift, upload to FTP server
            try_make_dir(ftps, upload_folder_name)
            all_skipped = True

            for target_folder in transmit_folder_list:
                try_make_dir(ftps, f"{upload_folder_name}/{target_folder}")

                # Check if the file is already on FTP server
                compare_result = compare_size(ftps, noid, upload_folder_name, target_folder)
                if compare_result[0] and compare_result[1]:
                    print(f"{upload_folder_name}/{target_folder}/1.tar already exist on server. NOID: {noid}",
                          color=[34])
                    continue

                # For new / different files, overwrite files on FTP server
                all_skipped = False
                print(f"Transmitting {upload_folder_name}/{target_folder}/1.tar")

                # Try to download from the OpenStack Swift server
                err = subprocess.run(
                    ["swift", *swift_login, "download", "newspapers",
                     f"{noid}/{target_folder}/1.tar", "-o", f"{temp_folder}/1.tar"],
                    stderr=subprocess.PIPE).stderr.decode()
                if err:
                    raise error_perm("File does not exist on Swift.")

                # Overwrite files on FTP server
                with open(f"{temp_folder}/1.tar", "rb") as transmit_file:
                    ftps.storbinary(f"STOR {upload_folder_name}/{target_folder}/1.tar", transmit_file)

                # Clean up
                subprocess.run(["rm", "-f", f"{temp_folder}/1.tar"])

            # Get back to the production folder to reselect the language for the next issue
            ftps.cwd("../..")

            # Log the success message
            with open("successlog.log", 'a') as success_log:
                print(f"Finined {upload_folder_name}. NOID: {noid}", color=[42])
                success_log.write(f"{noid}|{upload_folder_name}|Success\n")

            # For file that already on FTP, skip them
            if all_skipped:
                print(f"{upload_folder_name} already exist on server. NOID: {noid} ({finish_count}/{total})",
                      color=[34])
                skip_count += 1
                continue

        # Stop current transmission if error occurs
        except error_perm as e:
            ftps.cwd("~/uploads/production")
            fail_count += 1

            # Log the reason
            with open("errorlog.log", 'a') as error_log:
                print("Error occurs when transmitting %s." % noid, color=[5, 41])
                error_log.write(f"Error occurs when transmitting |{noid}|{upload_folder_name}|{e}\n")

        # Give a detailed program status analysis
        current_time = datetime.datetime.now()
        progress = f"{finish_count} out of {total} ({finish_count * 100 / total:.2f}%), {fail_count} failed"
        max_len = len(progress) + 10
        print(f"{'=' * (max_len + 26)}", color=[1, 7, 93])
        print(f"   Current time is:       ", end='', color=[1, 7, 93])
        print(f"{current_time}", color=[7, 93], fit_len=max_len)
        print(f"   Current progress:      ", end='', color=[1, 7, 93])
        print(f"{progress}", color=[7, 93], fit_len=max_len)
        current_run_time = current_time - start_time
        current_progress_perc = (finish_count - skip_count) / (total - skip_count)
        estimate_remain = current_run_time / current_progress_perc - current_run_time
        print(f"   Current total runtime: ", end='', color=[1, 7, 93])
        print(f"{current_run_time}", color=[7, 93], fit_len=max_len)
        print(f"   Last update runtime:   ", end='', color=[1, 7, 93])
        print(f"{current_time - previous_time}", color=[7, 93], fit_len=max_len)
        print(f"   Estimate remaining:    ", end='', color=[1, 7, 93])
        print(f"{estimate_remain}", color=[7, 93], fit_len=max_len)
        print(f"   Estimate finish time:  ", end='', color=[1, 7, 93])
        print(f"{current_time + estimate_remain}", color=[7, 93], fit_len=max_len)
        print(f"{'=' * (max_len + 26)}", color=[1, 7, 93])
        previous_time = current_time

    ftps.close()

    # Clean up temporary folder
    subprocess.run(["rm", "-r", temp_folder])
Example #32
    def DownloadFile(self, dst_filename, local_filename=None):
        res = ''
        if local_filename is None:
            local_filename = dst_filename

        with open(local_filename, 'w+b') as f:
            self.ptr = f.tell()

            @setInterval(self.monitor_interval)
            def monitor():
                if not self.waiting:
                    i = f.tell()
                    if self.ptr < i:
                        logging.debug("%d  -  %0.1f Kb/s" %
                                      (i, (i - self.ptr) /
                                       (1024 * self.monitor_interval)))
                        self.ptr = i
                        os.system('clear')
                        print(
                            str(int((float(i) / float(dst_filesize)) * 100)) +
                            '%')
                    else:
                        ftp.close()

            def connect():
                ftp.connect(self.host, self.port)
                ftp.login(self.login, self.passwd)
                ftp.prot_p()
                if self.directory != None:
                    ftp.cwd(self.directory)
                # optimize socket params for download task
                ftp.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                ftp.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL,
                                    75)
                ftp.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
                                    60)

            ftp = FTP_TLS()
            ftp.set_pasv(True)

            connect()
            ftp.voidcmd('TYPE I')
            dst_filesize = ftp.size(dst_filename)

            mon = monitor()
            while dst_filesize > f.tell():
                try:
                    connect()
                    self.waiting = False
                    # retrieve file from position where we were disconnected
                    res = ftp.retrbinary('RETR %s' % dst_filename, f.write) if f.tell() == 0 else \
                              ftp.retrbinary('RETR %s' % dst_filename, f.write, rest=f.tell())

                except:
                    self.max_attempts -= 1
                    if self.max_attempts == 0:
                        mon.set()
                        logging.exception('')
                        raise
                    self.waiting = True
                    logging.info('waiting 30 sec...')
                    time.sleep(30)
                    logging.info('reconnect')

            mon.set()  #stop monitor
            ftp.close()

            if not res.startswith('226 Transfer complete'):
                logging.error(
                    'Downloaded file {0} is not full.'.format(dst_filename))
                os.remove(local_filename)
                return None

            return 1
Example #33
class FTPSession(object):
    """ Attempt to create some robustness and performance to FTPing """
    def __init__(self, server, username, password, tmpdir='/tmp', timeout=60):
        """Build a FTP session """
        self.conn = None
        self.server = server
        self.username = username
        self.password = password
        self.tmpdir = tmpdir
        self.timeout = timeout

    def _connect(self):
        """Connect to FTP server """
        if self.conn is not None:
            return
        logging.debug('Creating new connection to server %s', self.server)
        not_connected = True
        attempt = 1
        while not_connected and attempt < 6:
            try:
                self.conn = FTP_TLS(self.server, timeout=self.timeout)
                self.conn.login(self.username, self.password)
                self.conn.prot_p()
                not_connected = False
            except Exception as exp:
                logging.debug(exp)
                time.sleep(5)
                self.close()
            attempt += 1
        if not_connected is True:
            raise Exception("Failed to make FTP connection after 5 tries!")

    def _reconnect(self):
        """ First attempt to shut down connection and then reconnect """
        logging.debug('_reconnect() was called...')
        try:
            self.conn.quit()
            self.conn.close()
        except:
            pass
        finally:
            self.conn = None
        self._connect()

    def _put(self, path, localfn, remotefn):
        """ """
        self.chdir(path)
        sz = os.path.getsize(localfn)
        if sz > 14000000000:
            # Step 1 Split this big file into 14GB chunks, each file will have
            # suffix .aa then .ab then .ac etc
            basefn = os.path.basename(localfn)
            cmd = "split --bytes=14000M %s %s/%s." % (localfn, self.tmpdir,
                                                      basefn)
            subprocess.call(cmd, shell=True, stderr=subprocess.PIPE)
            files = glob.glob("%s/%s.??" % (self.tmpdir, basefn))
            for filename in files:
                suffix = filename.split(".")[-1]
                self.conn.storbinary('STOR %s.%s' % (remotefn, suffix),
                                     open(filename, 'rb'))
                os.unlink(filename)
        else:
            logging.debug("_put '%s' to '%s'", localfn, remotefn)
            self.conn.storbinary('STOR %s' % (remotefn, ), open(localfn, 'rb'))
        return True

    def close(self):
        """ Good bye """
        try:
            self.conn.quit()
            self.conn.close()
        except:
            pass
        finally:
            self.conn = None

    def chdir(self, path):
        if self.pwd() == path.rstrip("/"):
            return
        self.conn.cwd("/")
        for dirname in path.split("/"):
            if dirname == '':
                continue
            bah = []
            self.conn.retrlines('NLST', bah.append)
            if dirname not in bah:
                logging.debug("Creating directory '%s'", dirname)
                self.conn.mkd(dirname)
            logging.debug("Changing to directory '%s'", dirname)
            self.conn.cwd(dirname)

    def pwd(self):
        """ Low friction function to get connectivity """
        self._connect()
        pwd = exponential_backoff(self.conn.pwd)
        if pwd is None:
            self._reconnect()
            pwd = exponential_backoff(self.conn.pwd)
        logging.debug("pwd() is currently '%s'", pwd)
        return pwd

    def put_file(self, path, localfn, remotefn):
        """ Put the File """
        res = exponential_backoff(self._put, path, localfn, remotefn)
        if not res:
            self._reconnect()
            res = exponential_backoff(self._put, path, localfn, remotefn)
            if not res:
                logging.error("Double Failure to upload filename: '%s'",
                              localfn)
                return False
        return True

    def put_files(self, path, localfns, remotefns):
        """ Put the File """
        res = []
        for localfn, remotefn in zip(localfns, remotefns):
            res.append(self.put_file(path, localfn, remotefn))
        return res
Example #34
class PostDownloader(object):

    def __init__(self, settings: 'Settings', logger, web_queue=None) -> None:
        self.settings = settings
        self.web_queue = web_queue
        self.logger = logger
        self.ftps: Optional[FTP_TLS] = None
        self.current_ftp_dir: Optional[str] = None
        self.current_download: DataDict = {
            'filename': '',
            'blocksize': 0,
            'speed': 0,
            'index': 0,
            'total': 0,
        }

    def process_downloaded_archive(self, archive: Archive) -> None:
        if os.path.isfile(archive.zipped.path):
            except_at_open = False
            return_error = None
            try:
                my_zip = ZipFile(
                    archive.zipped.path, 'r')
                return_error = my_zip.testzip()
                my_zip.close()
            except (BadZipFile, NotImplementedError):
                except_at_open = True
            if except_at_open or return_error:
                if 'panda' in archive.source_type:
                    self.logger.error(
                        "For archive: {}, file check on downloaded zipfile failed on file: {}, "
                        "forcing download as panda_archive to fix it.".format(archive, archive.zipped.path)
                    )
                    crc32 = calc_crc32(
                        archive.zipped.path)
                    Archive.objects.add_or_update_from_values({'crc32': crc32}, pk=archive.pk)
                    if self.web_queue and archive.gallery:
                        temp_settings = Settings(load_from_config=self.settings.config)
                        temp_settings.allow_downloaders_only(['panda_archive'], True, True, True)
                        self.web_queue.enqueue_args_list((archive.gallery.get_link(),), override_options=temp_settings)
                        return
                else:
                    self.logger.warning(
                        "For archive: {}, File check on downloaded zipfile: {}. "
                        "Check the file manually.".format(archive, archive.zipped.path)
                    )
            crc32 = calc_crc32(
                archive.zipped.path)
            filesize = get_zip_filesize(
                archive.zipped.path)
            filecount = filecount_in_zip(
                archive.zipped.path)
            values = {'crc32': crc32,
                      'filesize': filesize,
                      'filecount': filecount,
                      }
            updated_archive = Archive.objects.add_or_update_from_values(
                values, pk=archive.pk)
            if archive.gallery and updated_archive.filesize != updated_archive.gallery.filesize:
                if Archive.objects.filter(gallery=updated_archive.gallery, filesize=updated_archive.gallery.filesize):
                    self.logger.info(
                        "For archive: {} size does not match gallery, "
                        "but there's already another archive that matches.".format(updated_archive)
                    )
                    return
                if 'panda' in archive.source_type:
                    self.logger.info(
                        "For archive: {} size does not match gallery, "
                        "downloading again from panda_archive.".format(updated_archive)
                    )
                    if self.web_queue:
                        temp_settings = Settings(load_from_config=self.settings.config)
                        temp_settings.allow_downloaders_only(['panda_archive'], True, True, True)
                        self.web_queue.enqueue_args_list(
                            (updated_archive.gallery.get_link(), ),
                            override_options=temp_settings
                        )
                else:
                    self.logger.warning(
                        "For archive: {} size does not match gallery. Check the file manually.".format(archive)
                    )
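    # Note: calc_crc32, get_zip_filesize and filecount_in_zip used above are project
    # utilities defined elsewhere. Conceptually (this description is an assumption,
    # the real implementations may differ) they compute a hex CRC32 over the file
    # bytes (zlib.crc32 over fixed-size chunks), the on-disk size of the zip, and
    # len(ZipFile(path).namelist()) respectively.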

    def write_file_update_progress(self, cmd: str, callback: Callable, filesize: int = 0, blocksize: int = 8192, rest: Optional[int] = None) -> str:
        self.ftps.voidcmd('TYPE I')  # type: ignore
        with self.ftps.transfercmd(cmd, rest) as conn:  # type: ignore
            self.current_download['filesize'] = filesize
            self.current_download['downloaded'] = 0
            self.current_download['filename'] = cmd.replace('RETR ', '')
            start = time.perf_counter()
            while True:
                data = conn.recv(blocksize)
                if not data:
                    break
                downloaded = len(data)
                self.current_download['downloaded'] += downloaded
                current = time.perf_counter()
                if current > start:
                    self.current_download['speed'] = self.current_download['downloaded'] / ((current - start) * 1024)
                callback(data)
            self.current_download['filename'] = ''
            self.current_download['speed'] = 0
            self.current_download['filesize'] = 0
            # shutdown ssl layer
            if _SSLSocket is not None and isinstance(conn, _SSLSocket):
                conn.unwrap()
        return self.ftps.voidresp()  # type: ignore
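    # write_file_update_progress() mirrors ftplib's FTP.retrbinary() (TYPE I,
    # transfercmd(), recv loop, voidresp()) but updates self.current_download as
    # each block arrives, so other code can poll filename/downloaded/speed while a
    # transfer runs. Hypothetical caller sketch (the file name is illustrative only):
    #
    #     with open('page_001.jpg', 'wb') as fp:
    #         self.write_file_update_progress('RETR page_001.jpg', fp.write, filesize=123456)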

    def start_connection(self) -> None:
        if self.settings.ftps['no_certificate_check']:
            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
            context.verify_mode = ssl.CERT_NONE
            context.check_hostname = False
        else:
            context = ssl.create_default_context()
        self.ftps = FTP_TLS(
            host=self.settings.ftps['address'],
            user=self.settings.ftps['user'],
            passwd=self.settings.ftps['passwd'],
            context=context,
            source_address=self.settings.ftps['source_address'],
            timeout=self.settings.timeout_timer
        )

        # Protect the data channel before any transfers
        self.ftps.prot_p()

    def set_current_dir(self, self_dir: str) -> None:
        self.current_ftp_dir = self_dir
        if not self.ftps:
            return None
        self.ftps.cwd(self_dir)

    def download_all_missing(self, archives: Optional[Iterable[Archive]] = None) -> None:

        files_torrent = []
        files_hath = []

        if not archives:
            found_archives: Iterable[Archive] = list(Archive.objects.filter_by_dl_remote())
        else:
            found_archives = archives

        if not found_archives:
            return

        for archive in found_archives:
            if 'torrent' in archive.match_type:
                files_torrent.append(archive)
            elif 'hath' in archive.match_type:
                files_hath.append(archive)

        if len(files_torrent) + len(files_hath) == 0:
            return

        self.start_connection()

        if not self.ftps:
            self.logger.error(
                "Cannot download the archives, the FTP connection is not initialized."
            )
            return None

        # Hath downloads
        if len(files_hath) > 0:
            self.set_current_dir(self.settings.providers['panda'].remote_hath_dir)
            # self.ftps.encoding = 'utf8'

            files_matched_hath = []
            for line in self.ftps.mlsd(facts=["type"]):
                if line[1]["type"] != 'dir':
                    continue
                m = re.search(r'.*?\[(\d+)\]$', line[0])
                if m:
                    for archive in files_hath:
                        if m.group(1) == archive.gallery.gid:
                            files_matched_hath.append(
                                (line[0], archive.zipped.path, int(archive.filesize), archive))

            for matched_file_hath in files_matched_hath:
                total_remote_size = 0
                remote_ftp_tuples = []
                for img_file_tuple in self.ftps.mlsd(path=matched_file_hath[0], facts=["type", "size"]):
                    if img_file_tuple[1]["type"] != 'file' or img_file_tuple[0] == 'galleryinfo.txt':
                        continue
                    total_remote_size += int(img_file_tuple[1]["size"])
                    remote_ftp_tuples.append((img_file_tuple[0], img_file_tuple[1]["size"]))
                if total_remote_size != matched_file_hath[2]:
                    self.logger.info(
                        "For archive: {archive}, remote folder: {folder} "
                        "has not completed the download ({current}/{total}), skipping".format(
                            archive=matched_file_hath[3],
                            folder=matched_file_hath[0],
                            current=filesizeformat(total_remote_size),
                            total=filesizeformat(matched_file_hath[2])
                        )
                    )
                    continue
                self.logger.info(
                    "For archive: {archive}, downloading and creating zip "
                    "for folder {filename}, {image_count} images".format(
                        archive=matched_file_hath[3],
                        filename=matched_file_hath[1],
                        image_count=len(remote_ftp_tuples)
                    ))
                dir_path = mkdtemp()
                self.current_download['total'] = len(remote_ftp_tuples)
                for count, remote_file in enumerate(sorted(remote_ftp_tuples), start=1):
                    for retry_count in range(10):
                        try:
                            with open(os.path.join(dir_path, remote_file[0]), "wb") as file:
                                self.current_download['index'] = count
                                self.write_file_update_progress(
                                    'RETR %s' % (str(matched_file_hath[0]) + "/" + remote_file[0]),
                                    file.write,
                                    int(remote_file[1])
                                )
                        except (ConnectionResetError, socket.timeout, TimeoutError):
                            self.logger.error("Hath download failed for file {} of {}, restarting connection...".format(
                                count,
                                len(remote_ftp_tuples))
                            )
                            self.ftps.close()
                            self.start_connection()
                            self.set_current_dir(self.settings.providers['panda'].remote_hath_dir)
                        else:
                            break
                with ZipFile(os.path.join(self.settings.MEDIA_ROOT,
                                          matched_file_hath[1]),
                             'w') as archive_file:
                    for (root_path, _, file_names) in os.walk(dir_path):
                        for current_file in file_names:
                            archive_file.write(
                                os.path.join(root_path, current_file), arcname=os.path.basename(current_file))
                shutil.rmtree(dir_path, ignore_errors=True)

                self.process_downloaded_archive(matched_file_hath[3])

        # Torrent downloads
        if len(files_torrent) > 0:
            self.set_current_dir(self.settings.ftps['remote_torrent_dir'])
            self.ftps.encoding = 'utf8'
            files_matched_torrent = []
            for line in self.ftps.mlsd(facts=["type", "size"]):
                if not line[0]:
                    continue
                if 'type' not in line[1]:
                    continue
                if line[1]["type"] != 'dir' and line[1]["type"] != 'file':
                    continue
                for archive in files_torrent:
                    if archive.gallery:
                        cleaned_torrent_name = os.path.splitext(
                            os.path.basename(archive.zipped.path))[0].replace(' [' + archive.gallery.gid + ']', '')
                    else:
                        cleaned_torrent_name = os.path.splitext(os.path.basename(archive.zipped.path))[0]
                    if replace_illegal_name(os.path.splitext(line[0])[0]) in cleaned_torrent_name:
                        if line[1]["type"] == 'dir':
                            files_matched_torrent.append((line[0], line[1]["type"], 0, archive))
                        else:
                            files_matched_torrent.append((line[0], line[1]["type"], int(line[1]["size"]), archive))
            for matched_file_torrent in files_matched_torrent:
                if matched_file_torrent[1] == 'dir':
                    dir_path = mkdtemp()
                    remote_ftp_files = list(self.ftps.mlsd(path=matched_file_torrent[0], facts=["type", "size"]))
                    self.current_download['total'] = len(remote_ftp_files)
                    self.logger.info(
                        "For archive: {archive}, downloading and creating zip "
                        "for folder {filename}, {image_count} images".format(
                            archive=matched_file_torrent[3],
                            filename=matched_file_torrent[0],
                            image_count=len(remote_ftp_files)
                        ))
                    for count, img_file_tuple in enumerate(remote_ftp_files, start=1):
                        if img_file_tuple[1]["type"] != 'file':
                            continue
                        for retry_count in range(10):
                            try:
                                with open(os.path.join(dir_path, img_file_tuple[0]), "wb") as file:
                                    self.current_download['index'] = count
                                    self.write_file_update_progress(
                                        'RETR %s' % (str(matched_file_torrent[0]) + "/" + img_file_tuple[0]),
                                        file.write,
                                        int(img_file_tuple[1]["size"])
                                    )
                            except (ConnectionResetError, socket.timeout, TimeoutError):
                                self.logger.error("Torrent download failed for folder, restarting connection...")
                                self.ftps.close()
                                self.start_connection()
                                self.set_current_dir(self.settings.ftps['remote_torrent_dir'])
                            else:
                                break
                    with ZipFile(matched_file_torrent[3].zipped.path, 'w') as archive_file:
                        for (root_path, _, file_names) in os.walk(dir_path):
                            for current_file in file_names:
                                archive_file.write(
                                    os.path.join(root_path, current_file), arcname=os.path.basename(current_file))
                    shutil.rmtree(dir_path, ignore_errors=True)
                else:
                    self.logger.info(
                        "For archive: {archive} downloading remote file: {remote} to local file: {local}".format(
                            archive=matched_file_torrent[3],
                            remote=matched_file_torrent[0],
                            local=matched_file_torrent[3].zipped.path
                        )
                    )
                    self.current_download['total'] = 1
                    for retry_count in range(10):
                        try:
                            with open(matched_file_torrent[3].zipped.path, "wb") as file:
                                self.current_download['index'] = 1
                                self.write_file_update_progress(
                                    'RETR %s' % matched_file_torrent[0], file.write, matched_file_torrent[2])
                        except (ConnectionResetError, socket.timeout, TimeoutError):
                            self.logger.error("Torrent download failed for archive, restarting connection...")
                            self.ftps.close()
                            self.start_connection()
                            self.set_current_dir(self.settings.ftps['remote_torrent_dir'])
                        else:
                            break
                    if self.settings.convert_rar_to_zip and os.path.splitext(matched_file_torrent[0])[1].lower() == ".rar":
                        self.logger.info(
                            "For archive: {}, converting rar: {} to zip".format(
                                matched_file_torrent[3],
                                matched_file_torrent[3].zipped.path
                            )
                        )
                        convert_rar_to_zip(matched_file_torrent[3].zipped.path)

                self.process_downloaded_archive(matched_file_torrent[3])

        self.ftps.close()

    def copy_all_missing(self, mode, archives: Optional[Iterable[Archive]] = None):
        files_torrent = []
        files_hath = []

        if not archives:
            found_archives: Iterable[Archive] = list(Archive.objects.filter_by_dl_remote())
        else:
            found_archives = archives

        if not found_archives:
            return

        for archive in found_archives:
            if not os.path.isfile(archive.zipped.path):
                if 'torrent' in archive.match_type:
                    files_torrent.append(archive)
                elif 'hath' in archive.match_type:
                    files_hath.append(archive)

        if len(files_torrent) + len(files_hath) == 0:
            return

        # Hath downloads
        if len(files_hath) > 0:
            files_matched_hath = []
            for matched_file in os.listdir(self.settings.providers['panda'].local_hath_folder):
                if os.path.isfile(os.path.join(self.settings.providers['panda'].local_hath_folder, matched_file)):
                    continue
                m = re.search(r'.*?\[(\d+)\]$', matched_file)
                if m:
                    for archive in files_hath:
                        if m.group(1) == archive.gallery.gid:
                            files_matched_hath.append(
                                [matched_file, archive.zipped.path, int(archive.filesize), archive])

            for img_dir in files_matched_hath:
                total_remote_size = 0
                remote_files = []
                directory = os.path.join(self.settings.providers['panda'].local_hath_folder, img_dir[0])
                for img_file in os.listdir(directory):
                    if not os.path.isfile(os.path.join(directory, img_file)) or img_file == 'galleryinfo.txt':
                        continue
                    total_remote_size += os.stat(
                        os.path.join(directory, img_file)).st_size
                    remote_files.append(
                        os.path.join(directory, img_file))
                if total_remote_size != img_dir[2]:
                    self.logger.info(
                        "For archive: {archive}, folder: {folder} "
                        "has not completed the download ({current}/{total}), skipping".format(
                            archive=img_dir[3],
                            folder=img_dir[0],
                            current=filesizeformat(total_remote_size),
                            total=filesizeformat(img_dir[2])
                        )
                    )
                    continue
                self.logger.info(
                    "For archive: {archive}, creating zip "
                    "for folder {filename}, {image_count} images".format(
                        archive=img_dir[3],
                        filename=img_dir[1],
                        image_count=len(remote_files)
                    ))
                dir_path = mkdtemp()
                for img_file_original in remote_files:
                    img_file = os.path.split(img_file_original)[1]
                    if mode == 'local_move':
                        shutil.move(img_file_original, os.path.join(dir_path, img_file))
                    else:
                        shutil.copy(img_file_original, os.path.join(dir_path, img_file))
                with ZipFile(os.path.join(self.settings.MEDIA_ROOT,
                                          img_dir[1]),
                             'w') as archive_file:
                    for (root_path, _, file_names) in os.walk(dir_path):
                        for current_file in file_names:
                            archive_file.write(
                                os.path.join(root_path, current_file), arcname=os.path.basename(current_file))
                shutil.rmtree(dir_path, ignore_errors=True)

                self.process_downloaded_archive(img_dir[3])

        # Torrent downloads
        if len(files_torrent) > 0:
            files_matched_torrent = []
            for filename in os.listdir(self.settings.torrent['download_dir']):
                for archive in files_torrent:
                    if archive.gallery:
                        cleaned_torrent_name = os.path.splitext(
                            os.path.basename(archive.zipped.path))[0].replace(' [' + archive.gallery.gid + ']', '')
                    else:
                        cleaned_torrent_name = os.path.splitext(os.path.basename(archive.zipped.path))[0]
                    if replace_illegal_name(os.path.splitext(filename)[0]) in cleaned_torrent_name:
                        files_matched_torrent.append([filename, not os.path.isfile(
                            os.path.join(self.settings.torrent['download_dir'], filename)), archive])

            for matched_file in files_matched_torrent:
                target = os.path.join(self.settings.torrent['download_dir'], matched_file[0])
                if matched_file[1]:
                    self.logger.info(
                        "For archive: {archive}, creating zip for folder: {filename}".format(
                            archive=matched_file[2],
                            filename=matched_file[0],
                        ))
                    dir_path = mkdtemp()
                    for img_file in os.listdir(target):
                        if not os.path.isfile(os.path.join(target, img_file)):
                            continue
                        if mode == 'local_move':
                            shutil.move(os.path.join(target, img_file), os.path.join(dir_path, img_file))
                        else:
                            shutil.copy(os.path.join(target, img_file), os.path.join(dir_path, img_file))

                    with ZipFile(matched_file[2].zipped.path, 'w') as archive_file:
                        for (root_path, _, file_names) in os.walk(dir_path):
                            for current_file in file_names:
                                archive_file.write(
                                    os.path.join(root_path, current_file), arcname=os.path.basename(current_file))
                    shutil.rmtree(dir_path, ignore_errors=True)
                else:
                    self.logger.info(
                        "For archive: {archive}, downloading file: {filename}".format(
                            archive=matched_file[2],
                            filename=matched_file[0],
                        ))
                    if mode == 'local_move':
                        shutil.move(target, matched_file[2].zipped.path)
                    else:
                        shutil.copy(target, matched_file[2].zipped.path)
                    if self.settings.convert_rar_to_zip and os.path.splitext(matched_file[0])[1].lower() == ".rar":
                        self.logger.info(
                            "For archive: {}, converting rar: {} to zip".format(
                                matched_file[2],
                                matched_file[2].zipped.path
                            )
                        )
                        convert_rar_to_zip(matched_file[2].zipped.path)

                self.process_downloaded_archive(matched_file[2])

    def transfer_all_missing(self, archives: Optional[Iterable[Archive]] = None) -> None:

        if self.settings.download_handler.startswith('local'):
            self.copy_all_missing(self.settings.download_handler, archives)
        else:
            for retry_count in range(3):
                try:
                    self.download_all_missing(archives)
                except (ConnectionResetError, socket.timeout, TimeoutError) as e:
                    self.logger.error(
                        "Download failed, restarting connection. Retry: {} of 3. Error: {}".format(retry_count + 1, e)
                    )
                else:
                    return
            self.logger.error("Download failed, restart limit reached (3), ending")
Example #35
0
File: wp_backup.py Project: hanroy/OC-P6
    ftps.cwd(folder)


def placeFiles():
    for file in os.listdir(wp_backup_path):
        localpath = os.path.join(wp_backup_path, file)
        if os.path.isfile(localpath):
            filesize = os.path.getsize(localpath)
            #print("STOR", file, localpath)
            with tqdm(unit='blocks',
                      unit_scale=True,
                      leave=False,
                      miniters=1,
                      desc='Uploading Files......',
                      total=filesize) as tqdm_instance, \
                    open(localpath, 'rb') as local_file:
                ftps.storbinary(
                    'STOR ' + file,
                    local_file,
                    128,
                    callback=lambda sent: tqdm_instance.update(len(sent)))


#print('Send Wordpress backup folder in progress ..')
lg.info('Send Wordpress backup folder in progress ..')
placeFiles()
lg.debug('Wordpress backup folder successfully sent ')
#print('Quit FTP connection')
ftps.quit()
lg.info('Closing FTP connection')
ftps.close()  #close connection
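This example starts mid-script, so the ftps connection, the lg logger, wp_backup_path and folder are created earlier in wp_backup.py and are not shown here. A minimal, assumed setup that would make the calls above work (host, credentials and paths are placeholders, not the project's actual values):

import logging
import os
from ftplib import FTP_TLS
from tqdm import tqdm

lg = logging.getLogger('wp_backup')
wp_backup_path = '/path/to/wp-backup'          # placeholder local backup directory
folder = 'backups'                             # placeholder remote folder

ftps = FTP_TLS('ftp.example.com')              # placeholder host
ftps.login('backup_user', 'backup_password')   # placeholder credentials
ftps.prot_p()                                  # protect the data channel before transfers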
Example #36
0
class FtpService:

    def __init__(self, config: FtpConfiguration):
        self.config = config
        self.__connect()

    def __connect(self) -> None:
        if self.config.protocol == "FTP":
            self.ftp = FTP()
        elif self.config.protocol == "SFTP":
            # Note: FTP_TLS is FTPS (FTP over TLS), not SSH-based SFTP, despite the setting name.
            self.ftp = FTP_TLS()
        else:
            raise UnknownFtpProtocolException(self.config.protocol)
        self.ftp.connect(host=self.config.host, port=self.config.port, timeout=10)
        resp: str = self.ftp.login(self.config.username, self.config.passwd)
        if resp.startswith("230"):
            logger.info("Successfully connect to FTP server")
        else:
            raise CannotConnectToFtpException(self.config)

    def read_file_items(self, path: str) -> List[FileItem]:
        files = list()
        items = list()
        self.ftp.cwd(path)
        self.ftp.dir(items.append)
        for item in items:
            name, is_file, size = self.__parse_list_line(item)
            if is_file:
                file_path = path + "/" + name
                mdt = self.ftp.sendcmd("MDTM " + file_path)
                dt = self.__parse_mdt(mdt)
                files.append(FileItem(file_path, dt, int(size)))
        return files

    def download_file(self, ftp_path: str, store_path: str) -> FileItem:
        a_store_path = pth.abspath(store_path)
        logger.info("Try download file from ftp \"%s\" to \"%s\"" % (ftp_path, a_store_path))
        try:
            self.ftp.retrbinary("RETR " + ftp_path, open(a_store_path, 'wb').write)
        except Exception as ex:
            logger.error("Cannot download file", ex)
            raise ex
        return FileSystemHelper.read_file_item(a_store_path)

    def close(self):
        self.ftp.close()
        logger.info("Close ftp connection with server")

    @staticmethod
    def __parse_list_line(line: str) -> tuple:
        # Parse a Unix-style LIST line: perms, links, owner, group, size, month, day, time, name...
        items = line.split()
        is_file = not items[0].startswith("d")
        size = items[4]
        # The name is every remaining field, so file names containing spaces survive.
        name = items[8:]
        return str.join(" ", name), is_file, size

    @staticmethod
    def __parse_mdt(mdt: str) -> datetime:
        # An MDTM reply looks like "213 YYYYMMDDHHMMSS"; keep the timestamp part.
        items = mdt.split()
        dt = items[1]
        year = int(dt[0:4])
        month = int(dt[4:6])
        day = int(dt[6:8])
        hour = int(dt[8:10])
        minute = int(dt[10:12])
        second = int(dt[12:14])
        return datetime(year, month, day, hour, minute, second)
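A hypothetical usage of the service above (the FtpConfiguration constructor arguments are assumed from the attributes read in __connect(); the host, credentials and paths are placeholders):

config = FtpConfiguration(protocol="FTP", host="ftp.example.com", port=21,
                          username="user", passwd="secret")
service = FtpService(config)
for file_item in service.read_file_items("/incoming"):
    print(file_item)                                            # FileItem built from path, MDTM timestamp and size
service.download_file("/incoming/report.csv", "./report.csv")   # placeholder remote/local paths
service.close()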