def upload_project(project_path, project_name, description, ptype, from_s3_bucket=None, annotation_status='NotStarted', image_quality_in_editor=None):
    """Recreate *project_name* from scratch and populate it from *project_path*.

    Any existing projects with the same name are deleted first; then the
    classes JSON, the images, and the annotations found under *project_path*
    (local or inside *from_s3_bucket*) are uploaded.  Returns the metadata of
    the freshly created project.
    """
    path = Path(project_path) if isinstance(project_path, str) else project_path
    # Remove stale projects carrying this name so the run starts clean.
    for stale in sa.search_projects(project_name, return_metadata=True):
        sa.delete_project(stale)
    new_project = sa.create_project(project_name, description, ptype)
    sa.create_annotation_classes_from_classes_json(
        new_project,
        path / "classes" / "classes.json",
        from_s3_bucket=from_s3_bucket)
    sa.upload_images_from_folder_to_project(
        new_project,
        path,
        annotation_status=annotation_status,
        from_s3_bucket=from_s3_bucket,
        image_quality_in_editor=image_quality_in_editor)
    sa.upload_annotations_from_folder_to_project(
        new_project, path, from_s3_bucket=from_s3_bucket)
    return new_project
def test_recursive_preannotations_folder(tmpdir):
    """Recursive pre-annotation upload picks up JSONs from subfolders."""
    tmpdir = Path(tmpdir)
    name = TEMP_PROJECT_NAME + "2"
    for stale in sa.search_projects(name, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(name, "test", "Vector")
    sa.upload_images_from_folder_to_project(
        project,
        "./tests/sample_recursive_test",
        annotation_status="QualityCheck",
        recursive_subfolders=True)
    assert len(sa.search_images(project)) == 2
    sa.create_annotation_classes_from_classes_json(
        project, "./tests/sample_recursive_test/classes/classes.json")
    sa.upload_preannotations_from_folder_to_project(
        project, "./tests/sample_recursive_test", recursive_subfolders=True)
    # Both images must come back with a pre-annotation JSON.
    for image in sa.search_images(project):
        sa.download_image_preannotations(project, image, tmpdir)
    assert len(list(tmpdir.glob("*.json"))) == 2
def test_preannotations_nonrecursive_s3_folder(tmpdir):
    """Non-recursive pre-annotation upload from S3 skips subfolders."""
    tmpdir = Path(tmpdir)
    name = TEMP_PROJECT_NAME + "7"
    for stale in sa.search_projects(name, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(name, "test", "Vector")
    # Images are taken recursively so both images land in the project...
    sa.upload_images_from_folder_to_project(
        project,
        "sample_recursive_test",
        from_s3_bucket="superannotate-python-sdk-test",
        recursive_subfolders=True)
    assert len(sa.search_images(project)) == 2
    sa.create_annotation_classes_from_classes_json(
        project,
        "sample_recursive_test/classes/classes.json",
        from_s3_bucket="superannotate-python-sdk-test")
    # ...but the pre-annotation upload itself is non-recursive.
    sa.upload_preannotations_from_folder_to_project(
        project,
        "sample_recursive_test",
        recursive_subfolders=False,
        from_s3_bucket="superannotate-python-sdk-test")
    for image in sa.search_images(project):
        sa.download_image_preannotations(project, image, tmpdir)
def test_vector_preannotation_upload_from_s3(tmpdir):
    """Vector pre-annotations uploaded from S3 download back for all images.

    Consistency fix: the pre-annotation upload previously passed the raw
    ``TEST_PROJECT_VECTOR`` constant while the sibling calls used the parsed
    S3 key ``f.path[1:]``.  Both spell the same value; use the parsed key
    everywhere so there is a single source of truth.
    """
    for stale in sa.search_projects(TEST_PROJECT3, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(TEST_PROJECT3, "hk_test", project_type="Vector")
    f = urlparse(f"s3://superannotate-python-sdk-test/{TEST_PROJECT_VECTOR}")
    # urlparse keeps the leading "/" on the path; strip it for a bucket key.
    folder = f.path[1:]
    sa.upload_images_from_folder_to_project(
        project, folder, annotation_status="NotStarted", from_s3_bucket=f.netloc)
    sa.create_annotation_classes_from_classes_json(
        project, folder + '/classes/classes.json', from_s3_bucket=f.netloc)
    assert sa.get_project_image_count(project) == 4
    sa.upload_preannotations_from_folder_to_project(
        project, folder, from_s3_bucket=f.netloc)
    for image in sa.search_images(project):
        sa.download_image_preannotations(project, image, tmpdir)
    assert len(list(Path(tmpdir).glob("*.*"))) == 4
    sa.delete_project(project)
def test_missing_annotation_upload(tmpdir):
    """Annotation upload reports uploaded / failed / missing-image JSONs."""
    name = "Example Project test vector missing annotation upload"
    project_type = "Vector"
    description = "test vector"
    from_folder = Path("./tests/sample_project_vector_for_checks")
    for stale in sa.search_projects(name, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(name, description, project_type)
    sa.upload_images_from_folder_to_project(
        project, from_folder, annotation_status="NotStarted")
    sa.create_annotation_classes_from_classes_json(
        project, from_folder / "classes" / "classes.json")
    uploaded, couldnt_upload, missing_images = \
        sa.upload_annotations_from_folder_to_project(project, from_folder)
    print(uploaded, couldnt_upload, missing_images)
    # One good JSON, two broken ones, one JSON with no matching image.
    assert len(uploaded) == 1
    assert len(couldnt_upload) == 2
    assert len(missing_images) == 1
    assert "tests/sample_project_vector_for_checks/example_image_1.jpg___objects.json" in uploaded
    assert "tests/sample_project_vector_for_checks/example_image_2.jpg___objects.json" in couldnt_upload
    assert "tests/sample_project_vector_for_checks/example_image_4.jpg___objects.json" in couldnt_upload
    assert "tests/sample_project_vector_for_checks/example_image_5.jpg___objects.json" in missing_images
def test_add_bbox_noinit(tmpdir):
    """Bbox and polygon annotations can be added to an un-initialized image."""
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME_NOINIT, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(
        PROJECT_NAME_NOINIT, PROJECT_DESCRIPTION, "Vector")
    sa.upload_images_from_folder_to_project(
        project, PATH_TO_SAMPLE_PROJECT, annotation_status="InProgress")
    sa.create_annotation_classes_from_classes_json(
        project, PATH_TO_SAMPLE_PROJECT / "classes" / "classes.json")
    sa.create_annotation_class(project, "test_add", "#FF0000")
    image_name = sa.search_images(project, "example_image_1")[0]
    sa.add_annotation_bbox_to_image(
        project, image_name, [10, 10, 500, 100], "test_add")
    sa.add_annotation_polygon_to_image(
        project, image_name, [100, 100, 500, 500, 200, 300], "test_add")
    # Exactly the two instances added above must be present.
    annotations_new = sa.get_image_annotations(project, image_name)["annotation_json"]
    assert len(annotations_new) == 2
    export = sa.prepare_export(project, include_fuse=True)
    sa.download_export(project, export, tmpdir)
    assert len(list(Path(tmpdir).rglob("*.*"))) == 4
def test_preannotation_folder_upload_download_cli(project_type, name, description, from_folder, tmpdir):
    """CLI pre-annotation upload: downloaded JSON count matches the source."""
    for stale in sa.search_projects(name, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(name, description, project_type)
    sa.upload_images_from_folder_to_project(
        project, from_folder, annotation_status="InProgress")
    sa.create_annotation_classes_from_classes_json(
        project, from_folder / "classes" / "classes.json")
    subprocess.run([
        f"superannotate upload-preannotations --project '{name}' --folder '{from_folder}'"
    ], check=True, shell=True)
    # Give the backend a moment to register the CLI upload.
    time.sleep(5)
    count_in = len(list(from_folder.glob("*.json")))
    for image_name in sa.search_images(project):
        sa.download_image_preannotations(project, image_name, tmpdir)
    count_out = len(list(Path(tmpdir).glob("*.json")))
    assert count_in == count_out
def test_folder_annotations(tmpdir):
    """Annotations uploaded into a folder stay scoped to that folder."""
    PROJECT_NAME = "test folder annotations"
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME, return_metadata=True):
        sa.delete_project(stale)
    # The rest of the test addresses the project by name, not metadata.
    project = sa.create_project(PROJECT_NAME, 'test', 'Vector')["name"]
    sa.upload_images_from_folder_to_project(
        project, FROM_FOLDER, annotation_status="InProgress")
    sa.create_annotation_classes_from_classes_json(
        project, FROM_FOLDER / "classes" / "classes.json")
    folder_metadata = sa.create_folder(project, "folder1")
    assert folder_metadata["name"] == "folder1"
    folders = sa.search_folders(project, return_metadata=True)
    assert len(folders) == 1
    folder_path = project + "/" + folders[0]["name"]
    sa.upload_images_from_folder_to_project(
        folder_path, FROM_FOLDER, annotation_status="InProgress")
    sa.upload_annotations_from_folder_to_project(folder_path, FROM_FOLDER)
    # Root-level copy has no instances; the folder copy got the annotations.
    annot = sa.get_image_annotations(project, "example_image_1.jpg")
    assert len(annot["annotation_json"]["instances"]) == 0
    annot = sa.get_image_annotations(project + "/folder1", "example_image_1.jpg")
    assert len(annot["annotation_json"]["instances"]) > 0
def preannotations_upload(command_name, args):
    """CLI entry point: upload (pre-)annotations to a project.

    Parses its own argument list, optionally converts COCO-format input to
    the SuperAnnotate format in a temporary directory, uploads the classes
    JSON, then dispatches to the annotation or pre-annotation upload
    depending on whether *command_name* contains "pre".
    """
    parser = argparse.ArgumentParser(prog=_CLI_COMMAND + " " + command_name)
    parser.add_argument('--project', required=True, help='Project name to upload')
    parser.add_argument(
        '--folder',
        required=True,
        help=
        'Folder (SuperAnnotate format) or JSON path (COCO format) from which to upload'
    )
    parser.add_argument('--format', required=False, default="SuperAnnotate", help='Input preannotations format.')
    parser.add_argument(
        '--dataset-name',
        required=False,
        help='Input annotations dataset name for COCO projects')
    parser.add_argument(
        '--task',
        required=False,
        help=
        'Task type for COCO projects can be panoptic_segmentation (Pixel), instance_segmentation (Pixel), instance_segmentation (Vector), keypoint_detection (Vector)'
    )
    args = parser.parse_args(args)
    # "--project" may be "project/folder"; split into the two metadata dicts.
    project_metadata, folder_metadata = sa.get_project_and_folder_metadata(
        args.project)
    if args.format != "SuperAnnotate":
        # Only COCO is accepted as an alternative input format, and it
        # additionally requires a dataset name and a task.
        if args.format != "COCO":
            raise sa.SABaseException(
                0, "Not supported annotations format " + args.format)
        if args.dataset_name is None:
            raise sa.SABaseException(
                0, "Dataset name should be present for COCO format upload.")
        if args.task is None:
            raise sa.SABaseException(
                0, "Task name should be present for COCO format upload.")
        logger.info("Annotations in format %s.", args.format)
        project_type = project_metadata["type"]
        # Keep the TemporaryDirectory object in a local so the converted
        # files survive until the uploads below have finished.
        tempdir = tempfile.TemporaryDirectory()
        tempdir_path = Path(tempdir.name)
        sa.import_annotation(args.folder, tempdir_path, "COCO", args.dataset_name, project_type, args.task)
        # Point the remainder of the flow at the converted output.
        args.folder = tempdir_path
    sa.create_annotation_classes_from_classes_json(
        project_metadata, Path(args.folder) / "classes" / "classes.json")
    # "upload-annotations" vs "upload-preannotations" share this function;
    # the command name decides which SDK call to make.
    if "pre" not in command_name:
        sa.upload_annotations_from_folder_to_project(
            (project_metadata, folder_metadata), folder_path=args.folder)
    else:
        sa.upload_preannotations_from_folder_to_project(
            (project_metadata, folder_metadata), folder_path=args.folder)
def test_basic_export(tmpdir):
    """Export/re-import round-trip preserves the image count."""
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(PROJECT_NAME, "t", "Vector")
    sa.upload_images_from_folder_to_project(
        project, PROJECT_FOLDER, annotation_status="InProgress")
    len_orig = len(sa.search_images(project))
    sa.create_annotation_classes_from_classes_json(
        project, PROJECT_FOLDER / "classes" / "classes.json")
    sa.upload_annotations_from_folder_to_project(project, PROJECT_FOLDER)
    export = sa.prepare_export(project, include_fuse=True)
    sa.download_export(project, export, tmpdir)
    # Re-import the exported folder into a fresh project.
    import_name = PROJECT_NAME + " import"
    for stale in sa.search_projects(import_name, return_metadata=True):
        sa.delete_project(stale)
    project_new = sa.create_project(import_name, "f", "Vector")
    sa.upload_images_from_folder_to_project(
        project_new, tmpdir, annotation_status="InProgress")
    len_new = len(sa.search_images(project_new))
    assert len_new == len_orig
def test_annotation_download_upload(project_type, name, description, from_folder, tmpdir):
    """Downloaded annotation of an uploaded image matches the source file.

    Fix: the original read both JSON files with bare ``json.load(open(...))``
    and never closed the handles; ``with`` blocks release them deterministically.
    """
    for stale in sa.search_projects(name, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(name, description, project_type)
    sa.upload_images_from_folder_to_project(
        project, from_folder, annotation_status="NotStarted")
    sa.create_annotation_classes_from_classes_json(
        project, from_folder / "classes" / "classes.json")
    sa.upload_annotations_from_folder_to_project(project, from_folder)
    image = sa.search_images(project)[2]
    sa.download_image_annotations(project, image, tmpdir)
    anns_json_in_folder = list(Path(tmpdir).glob("*.json"))
    anns_mask_in_folder = list(Path(tmpdir).glob("*.png"))
    assert len(anns_json_in_folder) == 1
    # Pixel projects carry a mask PNG next to the JSON; Vector projects don't.
    assert len(anns_mask_in_folder) == (1 if project_type == "Pixel" else 0)
    input_annotation_paths = sa.image_path_to_annotation_paths(
        from_folder / image, project_type)
    with open(input_annotation_paths[0]) as f:
        json1 = json.load(f)
    with open(anns_json_in_folder[0]) as f:
        json2 = json.load(f)
    # classId is assigned server-side, so drop it before comparing.
    for i in json1:
        i.pop("classId", None)
    for i in json2:
        i.pop("classId", None)
    assert json1 == json2
    if project_type == "Pixel":
        assert filecmp.cmp(
            input_annotation_paths[1], anns_mask_in_folder[0], shallow=False)
def coco_desktop_object(tmpdir):
    """Pipeline fixture: Desktop COCO export -> Web import -> project upload."""
    out_dir = tmpdir / "coco_from_desktop"
    final_dir = tmpdir / "coco_to_Web"
    sa.export_annotation_format(
        "tests/converter_test/COCO/input/fromSuperAnnotate/cats_dogs_desktop",
        str(out_dir), "COCO", "object_test", "Vector", "object_detection",
        "Desktop")
    # Flatten the train_set images into out_dir, then drop the subfolder.
    for image in glob(str(out_dir / 'train_set' / '*.jpg')):
        shutil.copy(image, out_dir / Path(image).name)
    shutil.rmtree(out_dir / 'train_set')
    sa.import_annotation_format(
        str(out_dir), str(final_dir), "COCO", "object_test_train", "Vector",
        "object_detection", "Web")
    project_name = "coco2sa_object_pipline"
    existing = sa.search_projects(project_name, True)
    if existing:
        sa.delete_project(existing[0])
    project = sa.create_project(project_name, "converter vector", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, final_dir / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(project, final_dir)
    sa.upload_annotations_from_folder_to_project(project, final_dir)
    return 0
def test_df_to_annotations_full(tmpdir):
    """Round-trip annotations through a DataFrame and back, then filter.

    Aggregates PROJECT_DIR into a DataFrame, writes it back out as
    annotations, re-aggregates, and checks every original row reappears.
    A filtered subset is then written out and uploaded as a second project.
    """
    tmpdir = Path(tmpdir)
    df = sa.aggregate_annotations_as_df(
        PROJECT_DIR,
        include_classes_wo_annotations=True,
        include_comments=True,
        include_tags=True
    )
    sa.df_to_annotations(df, tmpdir)
    # Re-aggregate the written-back annotations for comparison with df.
    df_new = sa.aggregate_annotations_as_df(
        tmpdir,
        include_classes_wo_annotations=True,
        include_comments=True,
        include_tags=True
    )
    for project in sa.search_projects("test df to annotations 4"):
        sa.delete_project(project)
    project = sa.create_project("test df to annotations 4", "test", "Vector")
    sa.upload_images_from_folder_to_project(project, PROJECT_DIR)
    sa.create_annotation_classes_from_classes_json(
        project, tmpdir / "classes" / "classes.json"
    )
    sa.upload_annotations_from_folder_to_project(project, tmpdir)
    # print(df_new["image_name"].value_counts())
    # print(df["image_name"].value_counts())
    # Row order is not preserved by the round-trip, so every original row is
    # searched for in df_new; the for/else fires assert False only when no
    # matching row was found at all.
    for _index, row in enumerate(df.iterrows()):
        for _, row_2 in enumerate(df_new.iterrows()):
            if row_2[1].equals(row[1]):
                break
        else:
            assert False
    # Keep only "Personal vehicle" instances with the "4"/"Num doors"
    # attribute, excluding polygons.
    fil1 = sa.filter_annotation_instances(
        df_new,
        include=[
            {
                "className": "Personal vehicle",
                "attributes": [{
                    "name": "4",
                    "groupName": "Num doors"
                }]
            }
        ],
        exclude=[{
            "type": "polygon"
        }]
    )
    filtered_export = (tmpdir / "filtered")
    filtered_export.mkdir()
    sa.df_to_annotations(fil1, filtered_export)
    for project in sa.search_projects("test df to annotations 3"):
        sa.delete_project(project)
    project = sa.create_project("test df to annotations 3", "test", "Vector")
    sa.upload_images_from_folder_to_project(project, PROJECT_DIR)
    sa.create_annotation_classes_from_classes_json(
        project, filtered_export / "classes" / "classes.json"
    )
    # The filtered subset must itself upload cleanly.
    sa.upload_annotations_from_folder_to_project(project, filtered_export)
def test_consensus():
    """Consensus scores across three folders for every annotation type."""
    annot_types = ['polygon', 'bbox', 'point']
    folder_names = ['consensus_1', 'consensus_2', 'consensus_3']
    df_column_names = [
        'creatorEmail', 'imageName', 'instanceId', 'area', 'className',
        'attributes', 'folderName', 'score'
    ]
    export_path = test_root / 'consensus_benchmark' / 'consensus_test_data'
    if len(sa.search_projects(project_name)) != 0:
        sa.delete_project(project_name)
    sa.create_project(project_name, "test bench", "Vector")
    # Three sibling folders, each receiving the same image set.
    for idx in range(1, 4):
        sa.create_folder(project_name, "consensus_" + str(idx))
    sa.create_annotation_classes_from_classes_json(
        project_name, export_path / 'classes' / 'classes.json')
    sa.upload_images_from_folder_to_project(
        project_name, export_path / "images", annotation_status="Completed")
    for idx in range(1, 4):
        sa.upload_images_from_folder_to_project(
            project_name + '/consensus_' + str(idx),
            export_path / "images",
            annotation_status="Completed")
    sa.upload_annotations_from_folder_to_project(project_name, export_path)
    # Each folder gets its own annotator's version of the annotations.
    for idx in range(1, 4):
        sa.upload_annotations_from_folder_to_project(
            project_name + '/consensus_' + str(idx),
            export_path / ('consensus_' + str(idx)))
    for annot_type in annot_types:
        res_df = sa.consensus(project_name, folder_names, annot_type=annot_type)
        # Every folder is represented in the result.
        assert sorted(res_df['folderName'].unique()) == folder_names
        # The DataFrame has exactly the expected columns.
        assert sorted(res_df.columns) == sorted(df_column_names)
        # Scores are bounded to [0, 1].
        assert (res_df['score'] >= 0).all()
        assert (res_df['score'] <= 1).all()
    image_names = [
        'bonn_000000_000019_leftImg8bit.png',
        'bielefeld_000000_000321_leftImg8bit.png'
    ]
    # An explicit image list restricts the consensus computation.
    res_images = sa.consensus(
        project_name,
        folder_names,
        export_root=export_path,
        image_list=image_names)
    assert sorted(res_images['imageName'].unique()) == sorted(image_names)
def test_cli_image_upload_project_export(tmpdir):
    """End-to-end CLI flow: create project, upload images, export ± fuse."""
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME, return_metadata=True):
        sa.delete_project(stale)

    def run_cli(command):
        # All CLI invocations share the same subprocess settings.
        subprocess.run(command, check=True, shell=True)

    run_cli(
        f'superannotate create-project --name "{PROJECT_NAME}" --description gg --type Vector'
    )
    project = PROJECT_NAME
    sa.create_annotation_classes_from_classes_json(
        PROJECT_NAME, "./tests/sample_recursive_test/classes/classes.json")
    # Non-recursive upload only finds the one top-level jpg.
    run_cli(
        f'superannotate upload-images --project "{PROJECT_NAME}" --folder ./tests/sample_recursive_test --extensions=jpg --set-annotation-status QualityCheck'
    )
    time.sleep(1)
    assert len(sa.search_images(project)) == 1
    # Recursive upload picks up the jpg in the subfolder too.
    run_cli(
        f'superannotate upload-images --project "{PROJECT_NAME}" --folder ./tests/sample_recursive_test --extensions=jpg --recursive'
    )
    time.sleep(1)
    assert len(sa.search_images(project)) == 2
    sa.upload_annotations_from_folder_to_project(
        project, "./tests/sample_recursive_test")
    run_cli(
        f'superannotate export-project --project "{PROJECT_NAME}" --folder {tmpdir}'
    )
    assert len(list(tmpdir.glob("*.json"))) == 1
    assert len(list(tmpdir.glob("*.jpg"))) == 0
    assert len(list(tmpdir.glob("*.png"))) == 0
    # Fuse generation happens asynchronously on the backend; give it time.
    time.sleep(60)
    run_cli(
        f'superannotate export-project --project "{PROJECT_NAME}" --folder {tmpdir} --include-fuse'
    )
    assert len(list(tmpdir.glob("*.json"))) == 1
    assert len(list(tmpdir.glob("*.jpg"))) == 1
    assert len(list(tmpdir.glob("*.png"))) == 1
def test_folder_export(tmpdir):
    """Exports honour explicit folder selection and the all-folders default."""
    PROJECT_NAME = "test folder export"
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(PROJECT_NAME, 'test', 'Vector')
    sa.create_annotation_classes_from_classes_json(
        project, FROM_FOLDER / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(
        project, FROM_FOLDER, annotation_status="InProgress")
    sa.create_folder(project, "folder1")
    project = PROJECT_NAME + "/folder1"
    sa.upload_images_from_folder_to_project(
        project, FROM_FOLDER, annotation_status="InProgress")
    sa.upload_annotations_from_folder_to_project(project, FROM_FOLDER)
    num_images = sa.get_project_image_count(project)
    assert num_images == 4
    sa.create_folder(PROJECT_NAME, "folder2")
    project2 = PROJECT_NAME + "/folder2"
    num_images = sa.get_project_image_count(project2)
    assert num_images == 0
    sa.copy_images(
        project, ["example_image_2.jpg", "example_image_3.jpg"], project2)
    # Export restricted to the two named folders: no root-level files.
    export = sa.prepare_export(PROJECT_NAME, ["folder1", "folder2"])
    sa.download_export(project, export, tmpdir)
    assert len(list((tmpdir / "classes").rglob("*"))) == 1
    assert len(list((tmpdir / "folder1").rglob("*"))) == 4
    assert len(list((tmpdir / "folder2").rglob("*"))) == 2
    assert len(list((tmpdir).glob("*.*"))) == 0
    # Export without a folder list also includes the project-root images.
    export = sa.prepare_export(PROJECT_NAME)
    sa.download_export(project, export, tmpdir)
    assert len(list((tmpdir / "classes").rglob("*"))) == 1
    assert len(list((tmpdir / "folder1").rglob("*"))) == 4
    assert len(list((tmpdir / "folder2").rglob("*"))) == 2
    assert len(list((tmpdir).glob("*.*"))) == 4
def test_upload_stress():
    """Bulk-upload a large local image set and verify the final count."""
    project = sa.create_project("test_test_15", "hk", 1)
    sa.create_annotation_classes_from_classes_json(
        project, "tests/sample_project_vector/classes/classes.json")
    sa.upload_images_from_folder_to_project(
        project,
        "/media/disc_drive/datasets/COCO/test2017",
        annotation_status="QualityCheck")
    # COCO test2017 contains exactly this many images.
    assert sa.get_project_image_count(project) == 40670
def test_from_s3_upload():
    """Upload jpg images and the classes JSON straight from an S3 bucket."""
    for stale in sa.search_projects(PROJECT_NAME_UPLOAD, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(PROJECT_NAME_UPLOAD, "hk", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, "frex9/classes/classes.json", S3_BUCKET)
    # Only jpg files under the prefix are taken.
    sa.upload_images_from_folder_to_project(
        project,
        S3_PREFIX, ["jpg"],
        annotation_status="QualityCheck",
        from_s3_bucket=S3_BUCKET)
    assert len(sa.search_images(project)) == 4
def test_fuse_image_create_pixel(tmpdir):
    """Downloaded fuse image is pixel-identical to the one in the export."""
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME_PIXEL, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(PROJECT_NAME_PIXEL, "test", "Pixel")
    sa.upload_image_to_project(
        project,
        "./tests/sample_project_pixel/example_image_1.jpg",
        annotation_status="QualityCheck")
    sa.create_annotation_classes_from_classes_json(
        project, "./tests/sample_project_pixel/classes/classes.json")
    sa.upload_image_annotations(
        PROJECT_NAME_PIXEL, "example_image_1.jpg",
        "./tests/sample_project_pixel/example_image_1.jpg___pixel.json",
        "./tests/sample_project_pixel/example_image_1.jpg___save.png")
    export = sa.prepare_export(project, include_fuse=True)
    (tmpdir / "export").mkdir()
    sa.download_export(project, export, (tmpdir / "export"))
    paths = sa.download_image(
        project,
        "example_image_1.jpg",
        tmpdir,
        include_annotations=True,
        include_fuse=True)
    print(paths, paths[2])
    # Compare the exported fuse PNG against the directly downloaded one.
    exported = np.array(
        Image.open(tmpdir / "export" / "example_image_1.jpg___fuse.png"))
    downloaded = np.array(Image.open(paths[2][0]))
    assert exported.shape == downloaded.shape
    assert exported.dtype == downloaded.dtype
    assert np.array_equal(exported, downloaded)
def test_copy_images2(tmpdir):
    """copy_images carries annotations, pin state and status across folders."""
    PROJECT_NAME = "test copy folder annotation images"
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(PROJECT_NAME, 'test', 'Vector')
    sa.create_annotation_classes_from_classes_json(
        project, FROM_FOLDER / "classes" / "classes.json")
    sa.create_folder(project, "folder1")
    project = PROJECT_NAME + "/folder1"
    sa.upload_images_from_folder_to_project(
        project, FROM_FOLDER, annotation_status="InProgress")
    sa.upload_annotations_from_folder_to_project(project, FROM_FOLDER)
    num_images = sa.get_project_image_count(project)
    assert num_images == 4
    sa.create_folder(PROJECT_NAME, "folder2")
    project2 = PROJECT_NAME + "/folder2"
    num_images = sa.get_project_image_count(project2)
    assert num_images == 0
    sa.pin_image(project, "example_image_2.jpg")
    im1 = sa.get_image_metadata(project, "example_image_2.jpg")
    assert im1["is_pinned"] == 1
    assert im1["annotation_status"] == "InProgress"
    sa.copy_images(
        project, ["example_image_2.jpg", "example_image_3.jpg"], project2)
    num_images = sa.get_project_image_count(project2)
    assert num_images == 2
    # Annotations travel with the copy.
    ann1 = sa.get_image_annotations(project, "example_image_2.jpg")
    ann2 = sa.get_image_annotations(project2, "example_image_2.jpg")
    assert ann1 == ann2
    # Pin state and annotation status are preserved per image.
    im1_copied = sa.get_image_metadata(project2, "example_image_2.jpg")
    assert im1_copied["is_pinned"] == 1
    assert im1_copied["annotation_status"] == "InProgress"
    im2_copied = sa.get_image_metadata(project2, "example_image_3.jpg")
    assert im2_copied["is_pinned"] == 0
    assert im2_copied["annotation_status"] == "InProgress"
def test_annotations_nonrecursive_s3_folder(tmpdir):
    """Non-recursive S3 annotation upload only matches the top-level JSON.

    Fix: the original read each exported JSON with ``json.load(open(...))``
    and never closed the handle; ``Path.read_text`` opens, reads and closes
    in one call.
    """
    tmpdir = Path(tmpdir)
    name = TEMP_PROJECT_NAME + "5"
    for stale in sa.search_projects(name, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(name, "test", "Vector")
    # Images are taken recursively, so both land in the project...
    sa.upload_images_from_folder_to_project(
        project,
        "sample_recursive_test",
        annotation_status="QualityCheck",
        from_s3_bucket="superannotate-python-sdk-test",
        recursive_subfolders=True)
    assert len(sa.search_images(project)) == 2
    sa.create_annotation_classes_from_classes_json(
        project,
        "sample_recursive_test/classes/classes.json",
        from_s3_bucket="superannotate-python-sdk-test")
    # ...but annotations are uploaded non-recursively.
    sa.upload_annotations_from_folder_to_project(
        project,
        "sample_recursive_test",
        recursive_subfolders=False,
        from_s3_bucket="superannotate-python-sdk-test")
    export = sa.prepare_export(project)
    time.sleep(1)
    sa.download_export(project, export, tmpdir)
    non_empty_annotations = 0
    for json_file in tmpdir.glob("*.json"):
        json_ann = json.loads(json_file.read_text())
        if "instances" in json_ann and len(json_ann["instances"]) > 0:
            non_empty_annotations += 1
    # Only the top-level annotation should carry instances.
    assert non_empty_annotations == 1
def test_image_copy_mult(tmpdir):
    """Repeated in-project copies get numbered names, keep annotations/pin."""
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME_CPY_MULT, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(PROJECT_NAME_CPY_MULT, "test", "Vector")
    sa.upload_image_to_project(
        project,
        "./tests/sample_project_vector/example_image_1.jpg",
        annotation_status="InProgress")
    sa.create_annotation_classes_from_classes_json(
        project, "./tests/sample_project_vector/classes/classes.json")
    sa.upload_image_annotations(
        project, "example_image_1.jpg",
        "./tests/sample_project_vector/example_image_1.jpg___objects.json")
    sa.upload_image_to_project(
        project,
        "./tests/sample_project_vector/example_image_2.jpg",
        annotation_status="InProgress")
    sa.pin_image(project, "example_image_1.jpg")
    images = sa.search_images(project)
    assert len(images) == 2
    image = images[0]
    # Copy the same image three times within the project.
    for _ in range(3):
        sa.copy_image(
            project,
            image,
            project,
            include_annotations=True,
            copy_annotation_status=True,
            copy_pin=True)
    assert len(sa.search_images(project)) == 5
    images = sa.search_images(project)
    # Copies are suffixed "_(1)", "_(2)", "_(3)".
    for copy_index in range(1, 4):
        copy_name = f"example_image_1_({copy_index}).jpg"
        assert copy_name in images
        anns = sa.get_image_annotations(project, copy_name)
        assert anns["annotation_json"] is not None
        metadata = sa.get_image_metadata(project, copy_name)
        assert metadata["is_pinned"] == 1
def test_vgg_convert_object(tmpdir):
    """VGG -> SuperAnnotate conversion output uploads cleanly."""
    input_dir = Path(
        "tests") / "converter_test" / "VGG" / "input" / "toSuperAnnotate"
    out_dir = Path(tmpdir) / "object_detection"
    sa.import_annotation_format(input_dir, out_dir, "VGG", "vgg_test",
                                "Vector", "object_detection", "Web")
    project_name = "vgg_test_object"
    existing = sa.search_projects(project_name, True)
    if existing:
        sa.delete_project(existing[0])
    project = sa.create_project(project_name, "converter vector", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, out_dir / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(project, out_dir)
    sa.upload_annotations_from_folder_to_project(project, out_dir)
def test_yolo_object_detection_web(tmpdir):
    """YOLO -> SuperAnnotate conversion output uploads cleanly."""
    input_dir = Path(
        'tests') / 'converter_test' / 'YOLO' / 'input' / 'toSuperAnnotate'
    out_dir = Path(tmpdir) / "vector_annotation_web"
    sa.import_annotation_format(input_dir, out_dir, 'YOLO', '', 'Vector',
                                'object_detection', 'Web')
    project_name = "yolo_object_detection"
    existing = sa.search_projects(project_name, True)
    if existing:
        sa.delete_project(existing[0])
    project = sa.create_project(project_name, "converter vector", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, out_dir / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(project, out_dir)
    sa.upload_annotations_from_folder_to_project(project, out_dir)
def test_anntotation_class_new_json():
    """Re-uploading the same classes JSON does not duplicate classes."""
    for stale in sa.search_projects(PROJECT_NAME_JSON, return_metadata=True):
        sa.delete_project(stale)
    sa.create_project(PROJECT_NAME_JSON, "tt", "Vector")
    classes_json = "./tests/sample_project_vector/classes/classes.json"
    # Upload twice: the second call must be idempotent, leaving 4 classes.
    for _ in range(2):
        sa.create_annotation_classes_from_classes_json(
            PROJECT_NAME_JSON, classes_json)
        assert len(sa.search_annotation_classes(PROJECT_NAME_JSON)) == 4
def test_supervisely_convert_instance(tmpdir):
    """Supervisely vector -> SuperAnnotate conversion uploads cleanly."""
    input_dir = Path(
        'tests'
    ) / 'converter_test' / 'Supervisely' / 'input' / 'toSuperAnnotate' / 'vector'
    out_dir = Path(tmpdir) / 'instance_segmentation'
    sa.import_annotation_format(input_dir, out_dir, 'Supervisely', '',
                                'Vector', 'instance_segmentation', 'Web')
    project_name = "supervisely_test_vector_convert_instance"
    existing = sa.search_projects(project_name, True)
    if existing:
        sa.delete_project(existing[0])
    project = sa.create_project(project_name, "converter vector", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, out_dir / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(project, out_dir)
    sa.upload_annotations_from_folder_to_project(project, out_dir)
def supervisely_convert_keypoint(tmpdir):
    """Pipeline fixture: Supervisely keypoints -> SuperAnnotate upload."""
    out_dir = tmpdir / 'keypoint_detection'
    sa.import_annotation_format(
        'tests/converter_test/Supervisely/input/toSuperAnnotate/keypoints',
        str(out_dir), 'Supervisely', '', 'Vector', 'keypoint_detection', 'Web')
    project_name = "supervisely_test_keypoint"
    existing = sa.search_projects(project_name, True)
    if existing:
        sa.delete_project(existing[0])
    project = sa.create_project(project_name, "converter vector", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, out_dir / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(project, out_dir)
    sa.upload_annotations_from_folder_to_project(project, out_dir)
    return 0
def test_coco_vector_instance(tmpdir):
    """COCO instance segmentation -> SuperAnnotate vector uploads cleanly."""
    input_dir = Path(
        "tests"
    ) / "converter_test" / "COCO" / "input" / "toSuperAnnotate" / "instance_segmentation"
    out_dir = Path(tmpdir) / "instance_vector"
    sa.import_annotation_format(input_dir, out_dir, "COCO", "instances_test",
                                "Vector", "instance_segmentation", "Web")
    project_name = "coco2sa_vector_instance"
    existing = sa.search_projects(project_name, True)
    if existing:
        sa.delete_project(existing[0])
    project = sa.create_project(project_name, "converter vector", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, out_dir / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(project, out_dir)
    sa.upload_annotations_from_folder_to_project(project, out_dir)
def vott_convert_vector(tmpdir):
    """Pipeline fixture: VoTT -> SuperAnnotate vector upload."""
    out_dir = tmpdir / "vector_annotation"
    sa.import_annotation_format(
        "tests/converter_test/VoTT/input/toSuperAnnotate", str(out_dir),
        "VoTT", "", "Vector", "vector_annotation", "Web")
    project_name = "vott_vector"
    existing = sa.search_projects(project_name, True)
    if existing:
        sa.delete_project(existing[0])
    project = sa.create_project(project_name, "converter vector", "Vector")
    sa.create_annotation_classes_from_classes_json(
        project, out_dir / "classes" / "classes.json")
    sa.upload_images_from_folder_to_project(project, out_dir)
    sa.upload_annotations_from_folder_to_project(project, out_dir)
    return 0
def test_export_s3(tmpdir):
    """Export uploaded to S3 mirrors the locally downloaded export.

    Refactor: the two duplicated paginator loops are folded into one local
    generator that yields every object key under the export prefix.
    """
    paginator = s3_client.get_paginator('list_objects_v2')

    def iter_export_keys():
        # One paginated listing pass over the export prefix.
        for response in paginator.paginate(Bucket=S3_BUCKET, Prefix=S3_PREFIX2):
            for object_data in response.get('Contents', []):
                yield object_data['Key']

    # Start from an empty prefix so leftovers don't skew the count.
    for key in iter_export_keys():
        s3_client.delete_object(Bucket=S3_BUCKET, Key=key)
    tmpdir = Path(tmpdir)
    for stale in sa.search_projects(PROJECT_NAME_EXPORT, return_metadata=True):
        sa.delete_project(stale)
    project = sa.create_project(PROJECT_NAME_EXPORT, "test", "Vector")
    sa.upload_images_from_folder_to_project(
        project,
        Path("./tests/sample_project_vector"),
        annotation_status="InProgress")
    sa.create_annotation_classes_from_classes_json(
        project, Path("./tests/sample_project_vector/classes/classes.json"))
    sa.upload_annotations_from_folder_to_project(
        project, Path("./tests/sample_project_vector"))
    for img in sa.search_images(project):
        sa.set_image_annotation_status(project, img, 'QualityCheck')
    new_export = sa.prepare_export(project, include_fuse=True)
    # Export straight into the S3 prefix, then list what landed there.
    sa.download_export(project, new_export, S3_PREFIX2, to_s3_bucket=S3_BUCKET)
    files = list(iter_export_keys())
    output_path = tmpdir / S3_PREFIX2
    output_path.mkdir()
    sa.download_export(project, new_export, output_path)
    local_files = list(output_path.rglob("*.*"))
    # Local download and S3 upload must contain the same number of files.
    assert len(local_files) == len(files)