def test_scan_simple(setup):
    """Scan the simple test tree and verify the stored SHA-512 of the known file."""
    db = core.db.DB(core.metadatapath.MetadataPath(test.create_files.get_metadata_root()),
                    force_drop=True)
    db.scan(test.create_files.get_simple_root())
    a_path = os.path.join(test.create_files.SRC, test.create_files.A_FILE_NAME)
    # PEP 8: compare to None with identity, not equality ("!= None" was a smell).
    # os.path.join always returns a str, so this guard is belt-and-braces only.
    assert a_path is not None
    if a_path is not None:
        sha512_val = db.get_file_info(a_path).sha512
        # the hash recorded during the scan must match the precomputed expectation
        assert sha512_val == test.results.sha512[test.create_files.A_STRING]
    db.close()
def test_new_root(setup):
    """Verify an existing metadata store can be reopened (without force_drop)
    and used to scan an additional root."""
    db = core.db.DB(core.metadatapath.MetadataPath(test.create_files.get_metadata_root()),
                    force_drop=True)
    db.scan(test.create_files.get_simple_root())
    # close the first handle before reopening the same metadata store;
    # the original leaked it by rebinding `db` without closing
    db.close()
    db = core.db.DB(core.metadatapath.MetadataPath(test.create_files.get_metadata_root()))
    db.scan(test.create_files.get_unicode_root())
    db.close()
def test_scan_simple(setup):
    """Scan the simple test tree and verify the stored SHA-512 of the known file."""
    db = core.db.DB(
        core.metadatapath.MetadataPath(test.create_files.get_metadata_root()),
        force_drop=True)
    db.scan(test.create_files.get_simple_root())
    a_path = os.path.join(test.create_files.SRC, test.create_files.A_FILE_NAME)
    # PEP 8: compare to None with identity, not equality ("!= None" was a smell).
    # os.path.join always returns a str, so this guard is belt-and-braces only.
    assert a_path is not None
    if a_path is not None:
        sha512_val = db.get_file_info(a_path).sha512
        # the hash recorded during the scan must match the precomputed expectation
        assert sha512_val == test.results.sha512[test.create_files.A_STRING]
    db.close()
def test_hash_time(setup):
    """Check that hash-performance rows are recorded only for the largest test files."""
    db = core.db.DB(
        core.metadatapath.MetadataPath(
            os.path.join(test.create_files.get_metadata_root(), 'hashtime')))
    db.scan(test.create_files.get_hash_root())
    perf_rows = db.get_hash_perf()
    # the table holds either the capped maximum or one row per generated test file
    per_file_count = test.const.HASH_TEST_FILE_MAX - test.const.HASH_TEST_FILE_MIN + 1
    assert len(perf_rows) in (core.const.MAX_HASH_PERF_VALUES, per_file_count)
    # absolute paths of the three largest generated files ("bigN.txt")
    largest_files = [
        os.path.abspath(os.path.join(test.create_files.get_hash_root(),
                                     'big' + str(index) + '.txt'))
        for index in range(test.const.HASH_TEST_FILE_MAX - 2,
                           test.const.HASH_TEST_FILE_MAX + 1)]
    # every recorded row must refer to one of those largest files
    for row in perf_rows:
        assert row.abspath in largest_files
    db.close()
def test_hash_time(setup):
    """Check that hash-performance rows are recorded only for the largest test files."""
    metadata_dir = os.path.join(test.create_files.get_metadata_root(), 'hashtime')
    db = core.db.DB(core.metadatapath.MetadataPath(metadata_dir))
    db.scan(test.create_files.get_hash_root())
    perf_entries = db.get_hash_perf()
    # entry count is either the configured cap or one per generated test file
    file_span = test.const.HASH_TEST_FILE_MAX - test.const.HASH_TEST_FILE_MIN + 1
    assert len(perf_entries) in (core.const.MAX_HASH_PERF_VALUES, file_span)
    # build the absolute paths of the three largest "bigN.txt" files
    hash_root = test.create_files.get_hash_root()
    biggest = []
    for file_number in range(test.const.HASH_TEST_FILE_MAX - 2,
                             test.const.HASH_TEST_FILE_MAX + 1):
        biggest.append(
            os.path.abspath(os.path.join(hash_root,
                                         'big' + str(file_number) + '.txt')))
    # only the largest files should have performance entries
    for entry in perf_entries:
        assert entry.abspath in biggest
    db.close()
import core.larg
import core.db
import core.util
import core.metadatapath

if __name__ == "__main__":
    # parse the standard latus arguments (metadata location and scan path);
    # import style unified to "import core.larg" to match how it is referenced
    larg_parser = core.larg.LatusArg("Deduplicate a folder")
    args = larg_parser.parse()
    db = core.db.DB(core.metadatapath.MetadataPath(args.metadata))
    db.scan(args.path)
    # release the database handle, matching the close discipline used elsewhere
    db.close()
def test_new_root(setup):
    """Verify an existing metadata store can be reopened (without force_drop)
    and used to scan an additional root."""
    db = core.db.DB(
        core.metadatapath.MetadataPath(test.create_files.get_metadata_root()),
        force_drop=True)
    db.scan(test.create_files.get_simple_root())
    # close the first handle before reopening the same metadata store;
    # the original leaked it by rebinding `db` without closing
    db.close()
    db = core.db.DB(
        core.metadatapath.MetadataPath(test.create_files.get_metadata_root()))
    db.scan(test.create_files.get_unicode_root())
    db.close()
import core.larg
import core.db
import core.metadatapath

if __name__ == "__main__":
    # standard latus arguments plus explicit source/destination folders
    larg_parser = core.larg.LatusArg("Merge one folder into another", False)
    larg_parser.parser.add_argument('-s', '--source', metavar='path', required=True,
                                    help="source folder")
    larg_parser.parser.add_argument('-d', '--dest', metavar='path', required=True,
                                    help="destination folder")
    args = larg_parser.parse()
    db = core.db.DB(core.metadatapath.MetadataPath(args.metadata))
    print("scanning", args.source)
    db.scan(args.source)
    print("scanning", args.dest)
    db.scan(args.dest)
    # files present in source but not in dest are the candidates to move
    diff = db.difference(args.source, args.dest)
    print("to move", diff)
    # release the database handle, matching the close discipline used elsewhere
    db.close()