Example #1
def _import(src_file, instance_data):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir, 'datumaro', env=dm_env)

        import_dm_annotations(dataset, instance_data)
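All of these `_import` hooks share the same signature because CVAT registers each one as the upload handler for a particular annotation format. A minimal registration sketch is shown below; the decorator name and its parameters are an assumption about the surrounding `cvat.apps.dataset_manager.formats.registry` module, not something taken from the examples on this page:

from cvat.apps.dataset_manager.formats.registry import importer  # assumed import path

@importer(name='Datumaro', ext='ZIP', version='1.0')  # parameter values are assumptions
def _import(src_file, instance_data, load_data_callback=None):
    ...  # body as in Example #1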
Example #2
def _import(src_file, instance_data):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir, 'label_me', env=dm_env)
        dataset.transform('masks_to_polygons')
        import_dm_annotations(dataset, instance_data)
Example #3
def _import(src_file, task_data):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        image_info = {}
        frames = [
            YoloExtractor.name_from_path(osp.relpath(p, tmp_dir))
            for p in glob(osp.join(tmp_dir, '**', '*.txt'), recursive=True)
        ]
        root_hint = find_dataset_root(
            [DatasetItem(id=frame) for frame in frames], task_data)
        for frame in frames:
            frame_info = None
            try:
                frame_id = match_dm_item(DatasetItem(id=frame),
                                         task_data,
                                         root_hint=root_hint)
                frame_info = task_data.frame_info[frame_id]
            except Exception: # the file name matches no task frame; skip it
                pass
            if frame_info is not None:
                image_info[frame] = (frame_info['height'], frame_info['width'])

        dataset = dm_env.make_importer('yolo')(tmp_dir, image_info=image_info) \
            .make_dataset()
        import_dm_annotations(dataset, task_data)
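The image_info dict built above maps each annotation file name to the matching frame's (height, width). This is needed because YOLO .txt files store boxes as fractions of the image size, so pixel coordinates cannot be recovered without the image dimensions. A standalone sketch of that conversion (the numbers are made up for illustration and are not part of the example above):

def yolo_to_pixel_bbox(x_c, y_c, w, h, img_h, img_w):
    # YOLO lines are "label x_center y_center width height", all normalized to [0, 1]
    left = (x_c - w / 2) * img_w
    top = (y_c - h / 2) * img_h
    return left, top, w * img_w, h * img_h  # (x, y, width, height) in pixels

print(yolo_to_pixel_bbox(0.5, 0.5, 0.2, 0.4, img_h=480, img_w=640))
# -> (256.0, 144.0, 128.0, 192.0)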
Example #4
def _import(src_file, instance_data):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir, 'vgg_face2', env=dm_env)
        dataset.transform('rename', r"|([^/]+/)?(.+)|\2|")
        import_dm_annotations(dataset, instance_data)
Example #5
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        # put label map from the task if not present
        labelmap_file = osp.join(tmp_dir, 'labelmap.txt')
        if not osp.isfile(labelmap_file):
            labels_meta = instance_data.meta['project']['labels'] \
                if isinstance(instance_data, ProjectData) else instance_data.meta['task']['labels']
            labels = (label['name'] + ':::' for _, label in labels_meta)
            with open(labelmap_file, 'w') as f:
                f.write('\n'.join(labels))

        # support flat archive layout
        anno_dir = osp.join(tmp_dir, 'Annotations')
        if not osp.isdir(anno_dir):
            anno_files = glob(osp.join(tmp_dir, '**', '*.xml'), recursive=True)
            subsets_dir = osp.join(tmp_dir, 'ImageSets', 'Main')
            os.makedirs(subsets_dir, exist_ok=True)
            with open(osp.join(subsets_dir, 'train.txt'), 'w') as subset_file:
                for f in anno_files:
                    subset_file.write(osp.splitext(osp.basename(f))[0] + '\n')

            os.makedirs(anno_dir, exist_ok=True)
            for f in anno_files:
                shutil.move(f, anno_dir)

        dataset = Dataset.import_from(tmp_dir, 'voc', env=dm_env)
        dataset.transform('masks_to_polygons')
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
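The generated labelmap.txt holds one line per task label. In Datumaro's VOC label_map convention a line reads name:color:parts:actions, so the trailing ':::' leaves the optional color, body-part and action fields empty. A small illustration with hypothetical labels (not taken from any real task):

labels = ['car', 'person']  # hypothetical label names
print('\n'.join(name + ':::' for name in labels))
# car:::
# person:::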
Example #6
def _import(src_file, task_data):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)
        dataset = Dataset.import_from(tmp_dir, 'icdar_text_segmentation', env=dm_env)
        dataset.transform(AddLabelToAnns, 'icdar')
        dataset.transform('masks_to_polygons')
        import_dm_annotations(dataset, task_data)
Example #7
def _import(src_file, task_data):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        dataset = dm_env.make_importer('tf_detection_api')(
            tmp_dir).make_dataset()
        import_dm_annotations(dataset, task_data)
Example #8
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        image_info = {}
        frames = [YoloExtractor.name_from_path(osp.relpath(p, tmp_dir))
            for p in glob(osp.join(tmp_dir, '**', '*.txt'), recursive=True)]
        root_hint = find_dataset_root(
            [DatasetItem(id=frame) for frame in frames], instance_data)
        for frame in frames:
            frame_info = None
            try:
                frame_id = match_dm_item(DatasetItem(id=frame), instance_data,
                    root_hint=root_hint)
                frame_info = instance_data.frame_info[frame_id]
            except Exception: # nosec
                pass
            if frame_info is not None:
                image_info[frame] = (frame_info['height'], frame_info['width'])

        dataset = Dataset.import_from(tmp_dir, 'yolo',
            env=dm_env, image_info=image_info)
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #9
def _import(src_file, task_data):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        # put label map from the task if not present
        labelmap_file = osp.join(tmp_dir, 'labelmap.txt')
        if not osp.isfile(labelmap_file):
            labels = (label['name'] + ':::'
                for _, label in task_data.meta['task']['labels'])
            with open(labelmap_file, 'w') as f:
                f.write('\n'.join(labels))

        # support flat archive layout
        anno_dir = osp.join(tmp_dir, 'Annotations')
        if not osp.isdir(anno_dir):
            anno_files = glob(osp.join(tmp_dir, '**', '*.xml'), recursive=True)
            subsets_dir = osp.join(tmp_dir, 'ImageSets', 'Main')
            os.makedirs(subsets_dir, exist_ok=True)
            with open(osp.join(subsets_dir, 'train.txt'), 'w') as subset_file:
                for f in anno_files:
                    subset_file.write(osp.splitext(osp.basename(f))[0] + '\n')

            os.makedirs(anno_dir, exist_ok=True)
            for f in anno_files:
                shutil.move(f, anno_dir)

        dataset = dm_env.make_importer('voc')(tmp_dir).make_dataset()
        masks_to_polygons = dm_env.transforms.get('masks_to_polygons')
        dataset = dataset.transform(masks_to_polygons)
        import_dm_annotations(dataset, task_data)
Example #10
def load(file_object, annotations):
    from pyunpack import Archive
    import os.path as osp
    from tempfile import TemporaryDirectory
    from glob import glob
    from datumaro.plugins.yolo_format.importer import YoloImporter
    from cvat.apps.dataset_manager.bindings import import_dm_annotations

    archive_file = file_object if isinstance(file_object, str) else getattr(
        file_object, "name")
    with TemporaryDirectory() as tmp_dir:
        Archive(archive_file).extractall(tmp_dir)

        image_info = {}
        anno_files = glob(osp.join(tmp_dir, '**', '*.txt'), recursive=True)
        for filename in anno_files:
            filename = osp.basename(filename)
            frame_info = None
            try:
                # try to interpret the file name as a numeric frame id
                frame_info = annotations.frame_info[int(
                    osp.splitext(filename)[0])]
            except Exception:
                pass
            try:
                # also try matching by file name; the last successful lookup wins
                frame_info = annotations.match_frame(filename)
                frame_info = annotations.frame_info[frame_info]
            except Exception:
                pass
            if frame_info is not None:
                image_info[osp.splitext(filename)[0]] = \
                    (frame_info['height'], frame_info['width'])

        dm_project = YoloImporter()(tmp_dir, image_info=image_info)
        dm_dataset = dm_project.make_dataset()
        import_dm_annotations(dm_dataset, annotations)
Example #11
def _import(src_file, instance_data):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir, 'market1501', env=dm_env)
        dataset.transform(AttrToLabelAttr, 'market-1501')
        import_dm_annotations(dataset, instance_data)
Example #12
File: icdar.py Project: anhvth/cvat
def _import(src_file, instance_data):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)
        dataset = Dataset.import_from(tmp_dir,
                                      'icdar_word_recognition',
                                      env=dm_env)
        dataset.transform(CaptionToLabel, 'icdar')
        import_dm_annotations(dataset, instance_data)
Example #13
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir, 'wider_face', env=dm_env)
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #14
def _import(src_file, task_data):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        dataset = dm_env.make_importer('voc')(tmp_dir).make_dataset()
        masks_to_polygons = dm_env.transforms.get('masks_to_polygons')
        dataset = dataset.transform(masks_to_polygons)
        import_dm_annotations(dataset, task_data)
Example #15
def _import(src_file, task_data):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)
        if glob(osp.join(tmp_dir, '*.txt')):
            dataset = Dataset.import_from(tmp_dir, 'imagenet_txt', env=dm_env)
        else:
            dataset = Dataset.import_from(tmp_dir, 'imagenet', env=dm_env)
        import_dm_annotations(dataset, task_data)
Example #16
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir, 'lfw')
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #17
def _import(src_file, task_data):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)
        if glob(osp.join(tmp_dir, '*.txt')):
            dataset = dm_env.make_importer('imagenet_txt')(
                tmp_dir).make_dataset()
        else:
            dataset = dm_env.make_importer('imagenet')(tmp_dir).make_dataset()
        import_dm_annotations(dataset, task_data)
Example #18
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)
        dataset = Dataset.import_from(tmp_dir, 'icdar_text_segmentation', env=dm_env)
        dataset.transform(AddLabelToAnns, 'icdar')
        dataset.transform('masks_to_polygons')
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #19
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir, 'voc', env=dm_env)
        dataset.transform('masks_to_polygons')
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #20
File: icdar.py Project: anhvth/cvat
def _import(src_file, instance_data):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)

        dataset = Dataset.import_from(tmp_dir,
                                      'icdar_text_localization',
                                      env=dm_env)
        dataset.transform(AddLabelToAnns, 'icdar')
        import_dm_annotations(dataset, instance_data)
Example #21
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        zipfile.ZipFile(src_file).extractall(tmp_dir)
        if glob(osp.join(tmp_dir, '*.txt')):
            dataset = Dataset.import_from(tmp_dir, 'imagenet_txt', env=dm_env)
        else:
            dataset = Dataset.import_from(tmp_dir, 'imagenet', env=dm_env)
            if load_data_callback is not None:
                load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #22
def _import(src_file, task_data):
    if zipfile.is_zipfile(src_file):
        with TemporaryDirectory() as tmp_dir:
            zipfile.ZipFile(src_file).extractall(tmp_dir)

            dataset = dm_env.make_importer('coco')(tmp_dir).make_dataset()
            import_dm_annotations(dataset, task_data)
    else:
        dataset = dm_env.make_extractor('coco_instances', src_file.name)
        import_dm_annotations(dataset, task_data)
Example #23
def _import(src_file, task_data):
    if zipfile.is_zipfile(src_file):
        with TemporaryDirectory() as tmp_dir:
            zipfile.ZipFile(src_file).extractall(tmp_dir)

            dataset = Dataset.import_from(tmp_dir, 'kitti_raw', env=dm_env)
            import_dm_annotations(dataset, task_data)
    else:
        dataset = Dataset.import_from(src_file.name, 'kitti_raw', env=dm_env)
        import_dm_annotations(dataset, task_data)
Example #24
def _import(src_file, instance_data):
    if zipfile.is_zipfile(src_file):
        with TemporaryDirectory() as tmp_dir:
            zipfile.ZipFile(src_file).extractall(tmp_dir)

            dataset = Dataset.import_from(tmp_dir, 'coco', env=dm_env)
            import_dm_annotations(dataset, instance_data)
    else:
        dataset = Dataset.import_from(src_file.name,
            'coco_instances', env=dm_env)
        import_dm_annotations(dataset, instance_data)
Example #25
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        if zipfile.is_zipfile(src_file):
            zipfile.ZipFile(src_file).extractall(tmp_dir)

            dataset = Dataset.import_from(tmp_dir, 'sly_pointcloud', env=dm_env)
        else:
            dataset = Dataset.import_from(src_file.name,
                                        'sly_pointcloud', env=dm_env)
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #26
def load(file_object, annotations):
    from pyunpack import Archive
    from tempfile import TemporaryDirectory
    from datumaro.plugins.tf_detection_api_format.importer import TfDetectionApiImporter
    from cvat.apps.dataset_manager.bindings import import_dm_annotations

    archive_file = file_object if isinstance(file_object, str) else getattr(
        file_object, "name")
    with TemporaryDirectory() as tmp_dir:
        Archive(archive_file).extractall(tmp_dir)

        dm_project = TfDetectionApiImporter()(tmp_dir)
        dm_dataset = dm_project.make_dataset()
        import_dm_annotations(dm_dataset, annotations)
Example #27
def load(file_object, annotations):
    from pyunpack import Archive
    from tempfile import TemporaryDirectory
    from datumaro.plugins.labelme_format import LabelMeImporter
    from datumaro.components.project import Environment
    from cvat.apps.dataset_manager.bindings import import_dm_annotations

    archive_file = file_object if isinstance(file_object, str) else getattr(
        file_object, "name")
    with TemporaryDirectory() as tmp_dir:
        Archive(archive_file).extractall(tmp_dir)

        dm_dataset = LabelMeImporter()(tmp_dir).make_dataset()
        masks_to_polygons = Environment().transforms.get('masks_to_polygons')
        dm_dataset = dm_dataset.transform(masks_to_polygons)
        import_dm_annotations(dm_dataset, annotations)
Example #28
def _import(src_file, instance_data, load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        labelmap_file = osp.join(tmp_dir, 'label_colors.txt')
        if not osp.isfile(labelmap_file):
            colormap = {
                label: info[0]
                for label, info in make_colormap(instance_data).items()
            }
            write_label_map(labelmap_file, colormap)

        dataset = Dataset.import_from(tmp_dir, 'cityscapes', env=dm_env)
        dataset.transform('masks_to_polygons')
        if load_data_callback is not None:
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)
Example #29
def _import(src_file, instance_data):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)

        color_map = {k: v[0] for k, v in make_colormap(instance_data).items()}
        color_map_path = osp.join(tmp_dir, KittiPath.LABELMAP_FILE)
        if not osp.isfile(color_map_path):
            write_label_map(color_map_path, color_map)

        dataset = Dataset.import_from(tmp_dir, format='kitti', env=dm_env)
        labels_meta = instance_data.meta['project']['labels'] \
            if isinstance(instance_data, ProjectData) else instance_data.meta['task']['labels']
        if 'background' not in [label['name'] for _, label in labels_meta]:
            dataset.filter('/item/annotation[label != "background"]',
                           filter_annotations=True)
        dataset.transform('masks_to_polygons')

        import_dm_annotations(dataset, instance_data)
Example #30
def _import(src_file, instance_data, load_data_callback=None):
    is_zip = zipfile.is_zipfile(src_file)
    src_file.seek(0)
    if is_zip:
        with TemporaryDirectory() as tmp_dir:
            zipfile.ZipFile(src_file).extractall(tmp_dir)

            if isinstance(instance_data, ProjectData):
                dataset = Dataset.import_from(tmp_dir, 'cvat', env=dm_env)
                if load_data_callback is not None:
                    load_data_callback(dataset, instance_data)
                import_dm_annotations(dataset, instance_data)
            else:
                anno_paths = glob(osp.join(tmp_dir, '**', '*.xml'),
                                  recursive=True)
                for p in anno_paths:
                    load_anno(p, instance_data)
    else:
        load_anno(src_file, instance_data)
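Taken together, the importers above follow a single pattern: unpack the uploaded file into a temporary directory, parse it into a Datumaro Dataset, optionally apply transforms (e.g. 'masks_to_polygons') and a load_data_callback to attach images, then hand the annotations to CVAT via import_dm_annotations. The sketch below condenses that shared shape; it reuses only names that already appear in the examples and is not itself a real CVAT format handler:

def _import_generic(src_file, instance_data, format_name,
                    transforms=(), load_data_callback=None):
    with TemporaryDirectory() as tmp_dir:
        Archive(src_file.name).extractall(tmp_dir)  # 1. unpack the upload
        dataset = Dataset.import_from(tmp_dir, format_name, env=dm_env)  # 2. parse it
        for transform in transforms:  # 3. e.g. 'masks_to_polygons'
            dataset.transform(transform)
        if load_data_callback is not None:  # 4. optionally attach image data
            load_data_callback(dataset, instance_data)
        import_dm_annotations(dataset, instance_data)  # 5. push annotations into CVAT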