# Copyright (C) 2020 Intel Corporation
#
# SPDX-License-Identifier: MIT

from datumaro.util.tf_util import import_tf

import_tf()  # prevent TF loading and potential interpreter crash

import accuracy_checker.representation as ac

import datumaro.components.extractor as dm
from datumaro.util.annotation_util import softmax


def import_predictions(predictions):
    # Convert Accuracy Checker predictions to Datumaro annotations
    anns = []

    for pred in predictions:
        anns.extend(import_prediction(pred))

    return anns

def import_prediction(pred):
    if isinstance(pred, ac.ClassificationPrediction):
        scores = softmax(pred.scores)
        return (dm.Label(label_id, attributes={'score': float(score)})
            for label_id, score in enumerate(scores))
    elif isinstance(pred, ac.ArgMaxClassificationPrediction):
        return (dm.Label(int(pred.label)), )
    else:
        # Reject unknown prediction types explicitly; falling through and
        # returning None would make anns.extend() above raise a TypeError
        raise NotImplementedError("Can't convert %s" % type(pred))
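
# A minimal usage sketch (hypothetical, not part of the original module):
# converts a raw Accuracy Checker classification prediction into Datumaro
# labels. The identifier and scores below are made-up sample values.
if __name__ == '__main__':
    raw = ac.ClassificationPrediction(identifier='sample-0',
        scores=[0.1, 2.5, 0.3])
    for ann in import_predictions([raw]):
        print(ann.label, ann.attributes['score'])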
import logging as log

from attrs import frozen  # used by the dataclasses below; missing from the fragment

from datumaro.components.annotation import (
    AnnotationType, Bbox, Label, LabelCategories,
)
from datumaro.components.extractor import (
    CategoriesInfo, DatasetItem, IExtractor,
)
from datumaro.components.media import ByteImage
from datumaro.util.tf_util import import_tf

try:
    tf = import_tf()
    import tensorflow_datasets as tfds
except ImportError:
    log.debug("Unable to import TensorFlow or TensorFlow Datasets. "
        "Dataset downloading via TFDS is disabled.")
    TFDS_EXTRACTOR_AVAILABLE = False
else:
    TFDS_EXTRACTOR_AVAILABLE = True

@frozen
class TfdsDatasetMetadata:
    default_converter_name: str
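
# A minimal usage sketch (hypothetical): consumers are expected to check
# TFDS_EXTRACTOR_AVAILABLE before using any TFDS-backed functionality.
# 'imagenet_txt' is only an illustrative converter name.
if __name__ == '__main__':
    if TFDS_EXTRACTOR_AVAILABLE:
        metadata = TfdsDatasetMetadata(default_converter_name='imagenet_txt')
        print('TFDS enabled, default converter:',
            metadata.default_converter_name)
    else:
        print('TFDS support is disabled; install tensorflow and '
            'tensorflow-datasets to enable it')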
# SPDX-License-Identifier: MIT

from tempfile import TemporaryDirectory

from pyunpack import Archive

from cvat.apps.dataset_manager.bindings import (CvatTaskDataExtractor,
    import_dm_annotations)
from cvat.apps.dataset_manager.util import make_zip_archive
from datumaro.components.project import Dataset

from .registry import dm_env, exporter, importer

from datumaro.util.tf_util import import_tf

try:
    import_tf()
    tf_available = True
except ImportError:
    tf_available = False


@exporter(name='TFRecord', ext='ZIP', version='1.0', enabled=tf_available)
def _export(dst_file, task_data, save_images=False):
    extractor = CvatTaskDataExtractor(task_data, include_images=save_images)
    extractor = Dataset.from_extractors(extractor)  # apply lazy transforms
    with TemporaryDirectory() as temp_dir:
        converter = dm_env.make_converter('tf_detection_api',
            save_images=save_images)
        converter(extractor, save_dir=temp_dir)

        make_zip_archive(temp_dir, dst_file)
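
# A minimal usage sketch (hypothetical): in CVAT this exporter is normally
# dispatched through the format registry, but it can also be called directly.
# 'task_data' stands for a task data object built by the dataset manager, and
# the destination path is a made-up example.
#
#     _export('/tmp/task_tfrecord.zip', task_data, save_images=True)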