Example #1
0
def run(command, args):
    """Dispatch a parsed CLI command to its handler module."""
    if command == 'add':
        add.tasks(args.descriptions, args.project)
    elif command == 'list':
        listit.list(args)
    elif command == 'find':
        find.find(args)
    elif command == 'update':
        update.update(args)
    elif command == 'delete':
        delete.delete(args)
    elif command == 'sync':
        sync.sync(args)
    elif command == 'mail':
        mail.send(args)
    elif command == 'print':
        print.print(args)
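
# --- Usage sketch (not part of the original example) ---
# A minimal sketch of how a dispatcher like run() could be wired to argparse
# subcommands. The parser layout, option names, and defaults below are
# assumptions inferred from the identifiers used above (add/list/find/update/
# delete/sync/mail/print, args.descriptions, args.project), not the project's
# actual CLI definition.
import argparse


def build_parser():
    parser = argparse.ArgumentParser(prog='tasks')
    sub = parser.add_subparsers(dest='command', required=True)

    # Only the 'add' options are implied by the example; the rest are placeholders.
    p_add = sub.add_parser('add')
    p_add.add_argument('descriptions', nargs='+')
    p_add.add_argument('--project', default=None)

    for name in ('list', 'find', 'update', 'delete', 'sync', 'mail', 'print'):
        sub.add_parser(name)

    return parser


if __name__ == '__main__':
    args = build_parser().parse_args()
    run(args.command, args)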
Example #2
0
def test_delete(build_potato_dataset, tree_type, hash_size, distance_metric, nearest_neighbors, leaf_size,
                parallel, batch_size, threshold, backup_keep, backup_duplicate, safe_deletion, expected):
    """Run delete() on the potato fixture dataset and check which files are kept and which are removed."""
    output_path = mkdir_output(os.path.join(str(PROJECT_DIR), "outputs"))
    df_dataset, img_file_list = build_potato_dataset

    to_keep, to_remove = delete(df_dataset, img_file_list, output_path, hash_size, tree_type, distance_metric,
                                nearest_neighbors, leaf_size, parallel, batch_size, threshold, backup_keep,
                                backup_duplicate, safe_deletion)

    # expected encodes: number kept, first two kept file names, number removed.
    assert len(to_keep) == expected[0]
    assert to_keep[0].split(os.sep)[-1] == expected[1]
    assert to_keep[1].split(os.sep)[-1] == expected[2]
    assert len(to_remove) == expected[3]

    # delete_output(output_path)

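# --- Hypothetical parametrization sketch (not from the original project) ---
# test_delete() takes a fixture plus many parameters, which suggests a
# pytest.mark.parametrize decorator elsewhere in the test module. The decorator
# below only illustrates that shape: every value, including the expected tuple,
# is a made-up placeholder, not a real configuration or expected result.
import pytest


@pytest.mark.parametrize(
    "tree_type, hash_size, distance_metric, nearest_neighbors, leaf_size, parallel, batch_size, "
    "threshold, backup_keep, backup_duplicate, safe_deletion, expected",
    [
        ("KDTree", 8, "manhattan", 5, 40, False, 32, 10,
         True, True, False, (2, "image_01.jpg", "image_02.jpg", 3)),
    ],
)
def test_delete_parametrized_sketch(build_potato_dataset, tree_type, hash_size, distance_metric,
                                    nearest_neighbors, leaf_size, parallel, batch_size, threshold,
                                    backup_keep, backup_duplicate, safe_deletion, expected):
    ...  # body identical to test_delete above
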
def main(args):
    """Entry point: create a timestamped output directory, then dispatch the requested command."""
    from _version import get_versions
    __version__ = get_versions()['version']

    # Timestamped run directory, e.g. <output_path>/2024-01-01-12-00
    dt = datetime.datetime.today().strftime('%Y-%m-%d-%H-%M')

    output_path = os.path.join(args.output_path, dt)
    FileSystem.mkdir_if_not_exist(output_path)

    if args.command == "delete":
        # Config
        images_path = args.images_path
        hash_algo = args.hash_algorithm
        hash_size = args.hash_size
        tree_type = args.tree_type
        distance_metric = args.distance_metric
        nearest_neighbors = args.nearest_neighbors
        leaf_size = args.leaf_size
        parallel = args.parallel
        batch_size = args.batch_size
        threshold = args.threshold
        backup_keep = args.backup_keep
        backup_duplicate = args.backup_duplicate
        safe_deletion = args.safe_deletion
        image_w = args.image_w
        image_h = args.image_h

        df_dataset, img_file_list = ImageToHash(images_path, hash_size=hash_size, hash_algo=hash_algo) \
            .build_dataset(parallel=parallel, batch_size=batch_size)

        delete(df_dataset, img_file_list, output_path, hash_size, tree_type,
               distance_metric, nearest_neighbors, leaf_size, parallel,
               batch_size, threshold, backup_keep, backup_duplicate,
               safe_deletion, image_w, image_h)

    if args.command == "show":
        # Config
        images_path = args.images_path
        hash_algo = args.hash_algorithm
        hash_size = args.hash_size
        parallel = args.parallel
        batch_size = args.batch_size

        df_dataset, _ = ImageToHash(images_path, hash_size=hash_size, hash_algo=hash_algo) \
            .build_dataset(parallel=parallel, batch_size=batch_size)

        show(df_dataset, output_path)

    if args.command == "search":
        # Config
        images_path = args.images_path
        hash_algo = args.hash_algorithm
        hash_size = args.hash_size
        tree_type = args.tree_type
        distance_metric = args.distance_metric
        nearest_neighbors = args.nearest_neighbors
        leaf_size = args.leaf_size
        parallel = args.parallel
        batch_size = args.batch_size
        threshold = args.threshold
        image_w = args.image_w
        image_h = args.image_h
        query = args.query

        df_dataset, _ = ImageToHash(images_path, hash_size=hash_size, hash_algo=hash_algo) \
            .build_dataset(parallel=parallel, batch_size=batch_size)

        search(df_dataset, output_path, tree_type, distance_metric,
               nearest_neighbors, leaf_size, parallel, batch_size, threshold,
               image_w, image_h, query)
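
# --- CLI wiring sketch (inferred, not the project's actual parser) ---
# main() reads many attributes from args; the parser below mirrors those
# attribute names so the sketch runs end to end. Flag spellings, defaults, and
# types are guesses for illustration only.
import argparse


def build_cli():
    parser = argparse.ArgumentParser(description="near-duplicate image utility")
    parser.add_argument("command", choices=["delete", "show", "search"])
    parser.add_argument("--images-path", required=True)
    parser.add_argument("--output-path", default="outputs")
    parser.add_argument("--hash-algorithm", default="phash")
    parser.add_argument("--hash-size", type=int, default=8)
    parser.add_argument("--tree-type", default="KDTree")
    parser.add_argument("--distance-metric", default="manhattan")
    parser.add_argument("--nearest-neighbors", type=int, default=5)
    parser.add_argument("--leaf-size", type=int, default=40)
    parser.add_argument("--parallel", action="store_true")
    parser.add_argument("--batch-size", type=int, default=32)
    parser.add_argument("--threshold", type=int, default=10)
    parser.add_argument("--backup-keep", action="store_true")
    parser.add_argument("--backup-duplicate", action="store_true")
    parser.add_argument("--safe-deletion", action="store_true")
    parser.add_argument("--image-w", type=int, default=128)
    parser.add_argument("--image-h", type=int, default=128)
    parser.add_argument("--query", default=None)
    return parser


if __name__ == "__main__":
    main(build_cli().parse_args())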