def mvtrash(_opts, _args):
    """Consolidate every directory named ``Utils.trashdir`` found under _args.

    Each matching trash directory is moved to a single per-drive trash root,
    ``<drive>/<sep>/<Utils.trashdir>/<original parent path components>``, so
    scattered trash folders are gathered in one place while the destination
    path still encodes where each one came from.

    _opts: parsed command options (unused here; kept for a uniform
           command signature across this module's subcommands).
    _args: iterable of root paths handed to Utils.iterate_dirs.
    """
    # iterate_dirs is assumed to yield (parent-dir, basename, full-path)
    # for directories only, given _dirs=True/_files=False — TODO confirm.
    for parent, base, path in Utils.iterate_dirs(_args, _dirs=True, _files=False):
        if base != Utils.trashdir:
            continue  # only act on directories named like the trash dir
        # splitdrive's tail is unused: we only need the drive prefix
        drive, _ = os.path.splitdrive(os.path.abspath(path))
        # path components of the parent, minus the leading empty element
        # produced by splitting an absolute path on os.sep
        components = os.path.abspath(parent).split(os.sep)[1:]
        dest = os.path.join(drive, os.sep, Utils.trashdir, *components)
        logging.info("mvtrash %s -> %s", path, dest)  # lazy %-args
        # renames creates intermediate destination directories as needed
        os.renames(path, dest)
def scan(_opts, _args):
    """Walk every file below the given roots and record its checksum.

    For each regular file yielded by Utils.iterate_dirs, compute its
    checksum and upsert it into the Sums table: create a new row on
    first sight, otherwise refresh the existing row's cksum field.

    _opts: parsed command options (unused; present for a uniform
           command signature).
    _args: iterable of root paths to scan.
    """
    for dpath, fname, fullpath in Utils.iterate_dirs(_args, _dirs=False, _files=True):
        digest = checksum(fullpath)
        logging.debug("cksum=%s fname=%s dpath=%s" % (digest, fname, dpath))
        # upsert: update the row when one exists, insert otherwise
        entry = Sums.get(fname=fname, dpath=dpath)
        if entry:
            entry.cksum = digest
        else:
            Sums(cksum=digest, fname=fname, dpath=dpath)
        # NOTE(review): flushing after each file — confirm against the
        # original's intended commit granularity.
        commit()
def find(_opts, _args):
    """Find duplicate files by checksum and (optionally) trash them.

    For every file below the roots in _args:
      * empty files, and files larger than _opts.maxsize (when set),
        are skipped;
      * the file's checksum is looked up in the Sums table; stale
        entries whose backing file no longer exists are purged;
      * when other files with the same checksum remain on disk, the
        current file's size is added to the "bytes_saved" counter and
        a Move to the trash is journaled;
      * the file is only actually trashed (and its Sums entry removed)
        when _opts.delete is set.

    Logs the total number of bytes saved when the walk finishes.
    """
    for dpath, fname, path in Utils.iterate_dirs(_args, _dirs=False, _files=True):
        # hoisted: the original called os.path.getsize up to three times
        size = os.path.getsize(path)
        # skip files that are too large, or empty
        if (_opts.maxsize and size > _opts.maxsize) or size == 0:
            # logging.warn is deprecated (since 3.3); use warning + lazy args
            logging.warning("Skipping %s %d", path, size)
            continue
        # calculate checksum
        digest = checksum(path)
        # remove entries whose corresponding file is missing;
        # collect the entries that still exist on disk
        duplicates = []
        for other in Sums.find(digest):  # other is a full pathname
            if other == path:
                continue
            if os.path.isfile(other):
                duplicates.append(other)
            else:
                Sums.remove(other)  # stale index entry: file is gone
        # if we actually found something existing
        if duplicates:
            logging.debug("duplicate files:")
            logging.debug("\t%s" % (path))
            for dup in duplicates:
                logging.debug("\t%s" % (dup))
            Counters.counts["bytes_saved"] += size
            Journal.emit(Journal.Move(path, Utils.trashpath(path)))
            # remove the file (must be explicitly asked for)
            if _opts.delete:
                Utils.trash(path)
                Sums.remove(path)
    logging.info("%d bytes saved" % Counters.counts["bytes_saved"])