"""Create the sqlite cache DB and print summary statistics for a backup set.

Runs the BackupAnalyzer over the cache given by --cache and, in 'auto'
mode, prints file/dir counts, size statistics, duplicate-file figures
and the ten largest files.
"""
import argparse
import datetime
import os

import humanize  # used for human-readable sizes; was referenced but never imported

import Cache.sqlite as sqlite
# NOTE(review): BackupAnalyzer is referenced below but was never imported in
# the original script -- confirm its defining module and add the import here,
# e.g. `from Analyzer import BackupAnalyzer`.


def _build_parser():
    """Return the command-line argument parser for this script."""
    parser = argparse.ArgumentParser(description='Create the sqlite DB')
    parser.add_argument('--cache', dest='cache', action='store', type=str,
                        default='', help='TODO')
    parser.add_argument('--data', dest='data', action='store', type=str,
                        default='', help='TODO')
    parser.add_argument('--mode', dest='mode', action='store', type=str,
                        default='auto', help='TODO')
    return parser


def _print_report(analyzer):
    """Print the 'auto'-mode summary report for an initialized analyzer."""
    print("items total: %d files" % analyzer.getFilesCount())
    print("items total: %d dirs" % analyzer.getDirsCount())
    print("size total: %s" % humanize.naturalsize(analyzer.getTotalSize()))
    print("   avg size: %s" % humanize.naturalsize(analyzer.getAvgSize()))
    print("   median size: %s" % humanize.naturalsize(analyzer.getMedianSize()))
    print("duplicated files total: %s files" % analyzer.getDuplicatedFilesCount())
    print("duplicated files size: %s" % humanize.naturalsize(analyzer.getDuplicatedFilesSize()))
    print("duplicated empty files : %s files" % analyzer.getEmptyFilesCount())
    # One "name (size)" entry per line for the ten largest files.
    # Assumes getTop10LargestFiles() yields (size, path) pairs, as the
    # original unpacking order shows.
    largest = "\n ".join(
        "%s (%s)" % (os.path.basename(path), humanize.naturalsize(size))
        for (size, path) in analyzer.getTop10LargestFiles()
    )
    print("largest 10 files: \n %s" % largest)
    print("empty dirs: %s" % analyzer.getEmptyDirsCount())


def main():
    """Initialize the sqlite cache, run the analyzer and emit the report."""
    args = vars(_build_parser().parse_args())

    cache = sqlite.sqlite()
    cache.setCacheLocation(args['cache'])
    cache.initialize()

    analyzer = BackupAnalyzer()
    analyzer.setCache(cache)
    analyzer.initialize()

    if args['mode'] == 'auto':
        _print_report(analyzer)


if __name__ == '__main__':
    main()