def set_sync_info(self, filename, mtime, size):
    """Record mtime/size of a file at the moment local and remote were synced.

    The data is kept in the local folder's meta data. It is later compared
    against the current file attributes to detect conflicts, i.e. independent
    modifications of source and remote since the last synchronization.
    """
    assert self.target.is_local()
    peer_id = self.target.peer.get_id()
    peer_entry = self.dir["peer_sync"].setdefault(peer_id, {})
    now = time.time()  # UTC time stamp
    # ":last_sync" is an invalid file name, so it cannot clash with real entries
    peer_entry[":last_sync"] = now
    info = {"m": mtime, "s": size, "u": now}
    peer_entry[filename] = info
    if self.PRETTY:
        # again an invalid file name, to avoid conflicts with real entries
        peer_entry[":last_sync_str"] = pretty_stamp(now)
        info["mtime_str"] = pretty_stamp(mtime) if mtime else "(directory)"
        info["uploaded_str"] = pretty_stamp(now)
    self.modified_sync = True
def flush(self):
    """Write self to .pyftpsync-meta.json.

    Meta files are written even on read-only targets, but never in dry-run
    mode. A meta file that was read before and no longer holds any data is
    removed instead of rewritten.
    """
    assert self.path == self.target.cur_dir
    target = self.target
    if target.dry_run:
        pass  # dry-run: leave the target untouched
    elif self.was_read and len(self.list) == 0 and len(self.peer_sync) == 0:
        write("Remove empty meta data file: {}".format(target))
        target.remove_file(self.filename)
    elif not self.modified_list and not self.modified_sync:
        pass  # nothing changed since the last flush
    else:
        self.dir.update(
            {
                "_disclaimer": "Generated by https://github.com/mar10/pyftpsync",
                "_time_str": pretty_stamp(time.time()),
                "_file_version": self.VERSION,
                "_version": __version__,
                "_time": time.mktime(time.gmtime()),
            }
        )
        # Always store utf-8: `ensure_ascii` would escape every byte >127 as
        # `\x12` or `\u1234`, which is hard to read. `sort_keys` decodes
        # binary keys via utf-8, so no cp1225 (or otherwise encoded) data
        # may be passed in.
        if self.PRETTY:
            serialized = json.dumps(
                self.dir, indent=4, sort_keys=True, ensure_ascii=False
            )
        else:
            serialized = json.dumps(
                self.dir,
                indent=None,
                sort_keys=True,
                ensure_ascii=False,
                separators=(",", ":"),
            )
        target.write_text(self.filename, serialized)
        if target.synchronizer:
            target.synchronizer._inc_stat("meta_bytes_written", len(serialized))
    self.modified_list = False
    self.modified_sync = False
def set_sync_info(self, filename, mtime, size):
    """Store mtime/size captured when local and remote file were last in sync.

    Persisted in the local folder's meta data and used afterwards to detect
    conflicts, i.e. whether source or remote were modified by other means
    since the last synchronization.
    """
    assert self.target.is_local()
    sync_map = self.dir["peer_sync"].setdefault(self.target.peer.get_id(), {})
    stamp = time.time()  # UTC time stamp
    # The ":last_sync" key is not a legal file name, so it can't collide
    sync_map[":last_sync"] = stamp
    entry = sync_map[filename] = {"m": mtime, "s": size, "u": stamp}
    if self.PRETTY:
        # invalid-file-name key on purpose, to avoid collisions
        sync_map[":last_sync_str"] = pretty_stamp(stamp)
        entry["mtime_str"] = pretty_stamp(mtime) if mtime else "(directory)"
        entry["uploaded_str"] = pretty_stamp(stamp)
    self.modified_sync = True
def set_mtime(self, filename, mtime, size):
    """Remember a file's real mtime in the folder's meta data.

    Needed on FTP targets: FTP servers don't allow setting a file's mtime
    and record the upload time instead. Size and upload time are stored as
    well, so changes made by other means can be detected and the stale
    meta data discarded.
    """
    upload_time = time.time()  # UTC time stamp
    entry = {"m": mtime, "s": size, "u": upload_time}
    self.list[filename] = entry
    if self.PRETTY:
        entry["mtime_str"] = pretty_stamp(mtime)
        entry["uploaded_str"] = pretty_stamp(upload_time)
    self.modified_list = True
def set_mtime(self, filename, mtime, size):
    """Record a file's true mtime in the folder's meta data.

    Needed for FTP targets, where servers replace a file's mtime with the
    upload time. Size and upload time are stored too, so the meta data can
    be discarded when the file was changed by other means.
    """
    upload_time = time.time()  # UTC time stamp
    if self.target.server_time_ofs:
        # Shift by the estimated server time offset so the stored 'u' stamp
        # approximates the mtime the server will generate for this file
        upload_time += self.target.server_time_ofs
    record = {"m": mtime, "s": size, "u": upload_time}
    if self.PRETTY:
        record.update(
            mtime_str=pretty_stamp(mtime), uploaded_str=pretty_stamp(upload_time)
        )
    self.list[filename] = record
    self.modified_list = True
def flush(self): """Write self to .pyftpsync-meta.json.""" # We DO write meta files even on read-only targets, but not in dry-run mode # if self.target.readonly: # write("DirMetadata.flush(%s): read-only; nothing to do" % self.target) # return assert self.path == self.target.cur_dir if self.target.dry_run: # write("DirMetadata.flush(%s): dry-run; nothing to do" % self.target) pass elif self.was_read and len(self.list) == 0 and len(self.peer_sync) == 0: write("Remove empty meta data file: {}".format(self.target)) self.target.remove_file(self.filename) elif not self.modified_list and not self.modified_sync: # write("DirMetadata.flush(%s): unmodified; nothing to do" % self.target) pass else: self.dir["_disclaimer"] = "Generated by https://github.com/mar10/pyftpsync" self.dir["_time_str"] = pretty_stamp(time.time()) self.dir["_file_version"] = self.VERSION self.dir["_version"] = __version__ self.dir["_time"] = time.mktime(time.gmtime()) # We always save utf-8 encoded. # `ensure_ascii` would escape all bytes >127 as `\x12` or `\u1234`, # which makes it hard to read, so we set it to false. # `sort_keys` converts binary keys to unicode using utf-8, so we # must make sure that we don't pass cp1225 or other encoded data. data = self.dir opts = {"indent": 4, "sort_keys": True, "ensure_ascii": False} if compat.PY2: # The `encoding` arg defaults to utf-8 on Py2 and was removed in Py3 # opts["encoding"] = "utf-8" # Python 2 has problems with mixed keys (str/unicode) data = decode_dict_keys(data, "utf-8") if not self.PRETTY: opts["indent"] = None opts["separators"] = (",", ":") s = json.dumps(data, **opts) self.target.write_text(self.filename, s) if self.target.synchronizer: self.target.synchronizer._inc_stat("meta_bytes_written", len(s)) self.modified_list = False self.modified_sync = False
def set_mtime(self, filename, mtime, size):
    """Keep the real file mtime in the meta data.

    FTP servers do not allow setting a file's mtime — they stamp the upload
    time instead — so the true mtime is stored here. Size and upload time
    are recorded as well, to detect external changes that invalidate this
    meta data.
    """
    stamp = time.time()  # UTC time stamp
    offset = self.target.server_time_ofs
    if offset:
        # Add the estimated server clock offset, so the stored 'u' value
        # matches the mtime the server will assign to the uploaded file
        stamp += offset
    info = {"m": mtime, "s": size, "u": stamp}
    self.list[filename] = info
    if self.PRETTY:
        info["mtime_str"] = pretty_stamp(mtime)
        info["uploaded_str"] = pretty_stamp(stamp)
    self.modified_list = True
def flush(self):
    """Serialize this meta data to .pyftpsync-meta.json on the target.

    Writing is skipped in dry-run mode and when nothing changed; when the
    meta file was read before but no data is left, it is removed instead.
    """
    assert self.path == self.target.cur_dir
    tgt = self.target
    if tgt.dry_run:
        pass  # never write anything in dry-run mode
    elif self.was_read and len(self.list) == 0 and len(self.peer_sync) == 0:
        # The meta file exists but holds no data any more: drop it
        tgt.remove_file(self.filename)
    elif not (self.modified_list or self.modified_sync):
        pass  # unmodified; nothing to do
    else:
        self.dir.update(
            {
                "_disclaimer": "Generated by https://github.com/mar10/pyftpsync",
                "_time_str": pretty_stamp(time.time()),
                "_file_version": self.VERSION,
                "_version": __version__,
                "_time": time.mktime(time.gmtime()),
            }
        )
        dump_kwargs = {"sort_keys": True}
        if self.PRETTY:
            dump_kwargs["indent"] = 4
        payload = json.dumps(self.dir, **dump_kwargs)
        tgt.write_text(self.filename, payload)
        if tgt.synchronizer:
            tgt.synchronizer._inc_stat("meta_bytes_written", len(payload))
    self.modified_list = False
    self.modified_sync = False
def scan_handler(args):
    """Implement `scan` sub-command.

    Walk the target (optionally recursive), counting directories and files.
    Depending on the CLI flags, entries are listed, removable meta data
    files are reported, and removable lock files are reported.

    Args:
        args: parsed command line options (argparse namespace).
    """
    opts = namespace_to_dict(args)
    opts.update({"ftp_debug": args.verbose >= 6})
    target = make_target(args.target, opts)
    # Scanning must never modify the target
    target.readonly = True
    root_depth = target.root_dir.count("/")
    start = time.time()
    dir_count = 1
    file_count = 0
    processed_files = set()

    # Build a fresh option dict with match/exclude options expanded
    opts = namespace_to_dict(args)
    process_options(opts)

    def _pred(entry):
        """Walker predicate that check match/exclude options."""
        if not match_path(entry, opts):
            return False

    try:
        target.open()
        for e in target.walk(recursive=args.recursive, pred=_pred):
            is_dir = isinstance(e, DirectoryEntry)
            # Indent output proportional to the depth below the root folder
            indent = " " * (target.cur_dir.count("/") - root_depth)
            if is_dir:
                dir_count += 1
            else:
                file_count += 1
            if args.list:
                if is_dir:
                    print(indent, "[{e.name}]".format(e=e))
                else:
                    # Difference between recorded and system mtime
                    delta = e.mtime_org - e.mtime
                    dt_modified = pretty_stamp(e.mtime)
                    if delta:
                        prefix = "+" if delta > 0 else ""
                        print(
                            indent,
                            "{e.name:<40} {dt_modified} (system: {prefix}{delta})".format(
                                e=e,
                                prefix=prefix,
                                delta=timedelta(seconds=delta),
                                dt_modified=dt_modified,
                            ),
                        )
                    else:
                        print(
                            indent,
                            "{e.name:<40} {dt_modified}".format(
                                e=e, dt_modified=dt_modified
                            ),
                        )
            if (
                args.remove_meta
                and target.cur_dir_meta
                and target.cur_dir_meta.was_read
            ):
                fspec = target.cur_dir_meta.get_full_path()
                # Report every meta data file only once
                if fspec not in processed_files:
                    processed_files.add(fspec)
                    print("DELETE {}".format(fspec))
            if (
                args.remove_locks
                and not is_dir
                and e.name == DirMetadata.LOCK_FILE_NAME
            ):
                fspec = e.get_rel_path()
                print("DELETE {}".format(fspec))
    finally:
        target.close()

    print(
        "Scanning {:,} files in {:,} directories took {:02.2f} seconds.".format(
            file_count, dir_count, time.time() - start
        )
    )
def scan_handler(parser, args):
    """Implement `scan` sub-command.

    Walks the target (optionally recursive), counting directories and
    files. Depending on the CLI flags, entries are listed, removable meta
    data files are reported, and removable lock files are reported.

    Args:
        parser: the argparse parser (unused here; kept for the common
            handler signature).
        args: parsed command line options (argparse namespace).
    """
    opts = namespace_to_dict(args)
    opts.update({"ftp_debug": args.verbose >= 6})
    target = make_target(args.target, opts)
    # Scanning must never modify the target
    target.readonly = True
    root_depth = target.root_dir.count("/")
    start = time.time()
    dir_count = 1
    file_count = 0
    processed_files = set()

    # Build a fresh option dict with match/exclude options expanded
    opts = namespace_to_dict(args)
    process_options(opts)

    def _pred(entry):
        """Walker predicate that check match/exclude options."""
        # NOTE(review): returns False to exclude, otherwise falls through
        # returning None implicitly — confirm against `target.walk` that
        # None is treated as "keep".
        if not match_path(entry, opts):
            return False

    try:
        target.open()
        for e in target.walk(recursive=args.recursive, pred=_pred):
            is_dir = isinstance(e, DirectoryEntry)
            # Indent output proportional to the depth below the root folder
            indent = " " * (target.cur_dir.count("/") - root_depth)
            if is_dir:
                dir_count += 1
            else:
                file_count += 1
            if args.list:
                if is_dir:
                    print(indent, "[{e.name}]".format(e=e))
                else:
                    # Difference between recorded and system mtime
                    delta = e.mtime_org - e.mtime
                    dt_modified = pretty_stamp(e.mtime)
                    if delta:
                        prefix = "+" if delta > 0 else ""
                        print(
                            indent,
                            "{e.name:<40} {dt_modified} (system: {prefix}{delta})".format(
                                e=e,
                                prefix=prefix,
                                delta=timedelta(seconds=delta),
                                dt_modified=dt_modified,
                            ),
                        )
                    else:
                        print(
                            indent,
                            "{e.name:<40} {dt_modified}".format(
                                e=e, dt_modified=dt_modified
                            ),
                        )
            if (
                args.remove_meta
                and target.cur_dir_meta
                and target.cur_dir_meta.was_read
            ):
                fspec = target.cur_dir_meta.get_full_path()
                # Report every meta data file only once
                if fspec not in processed_files:
                    processed_files.add(fspec)
                    print("DELETE {}".format(fspec))
            if (
                args.remove_locks
                and not is_dir
                and e.name == DirMetadata.LOCK_FILE_NAME
            ):
                fspec = e.get_rel_path()
                print("DELETE {}".format(fspec))
    finally:
        target.close()

    print(
        "Scanning {:,} files in {:,} directories took {:02.2f} seconds.".format(
            file_count, dir_count, time.time() - start
        )
    )