def create_thread(tid, labels_mapping, user):
    """Run TensorFlow auto-annotation for task ``tid`` inside an rq worker.

    Args:
        tid: primary key of the task to annotate.
        labels_mapping: mapping from model class ids to task label ids.
        user: requesting user (kept for rq job signature compatibility;
            not used by the body).

    Raises:
        Exception: re-raises whatever the annotation pipeline raised, after
            logging it.
    """
    try:
        # Minimal confidence for a detection to be kept.
        THRESHOLD = 0.5

        # Init rq job progress reporting.
        job = rq.get_current_job()
        job.meta['progress'] = 0
        job.save_meta()

        # Get the task and a provider over its frames.
        db_task = TaskModel.objects.get(pk=tid)
        image_list = FrameProvider(db_task.data)

        # Run auto annotation by tf.
        slogger.glob.info("tf annotation with tensorflow framework for task {}".format(tid))
        result = run_tensorflow_annotation(image_list, labels_mapping, THRESHOLD)

        # A None result means the user cancelled the job mid-run.
        if result is None:
            slogger.glob.info('tf annotation for task {} canceled by user'.format(tid))
            return

        # Modify data format and save.
        result = convert_to_cvat_format(result)
        serializer = LabeledDataSerializer(data=result)
        if serializer.is_valid(raise_exception=True):
            put_task_data(tid, result)
        slogger.glob.info('tf annotation for task {} done'.format(tid))
    except Exception as ex:
        try:
            slogger.task[tid].exception('exception was occured during tf annotation of the task', exc_info=True)
        except Exception:
            # Task-specific logger may be unavailable (e.g. task was
            # deleted); fall back to the global logger. Never use a bare
            # except here — it would also trap SystemExit/KeyboardInterrupt.
            slogger.glob.exception('exception was occured during tf annotation of the task {}'.format(tid), exc_info=True)
        raise ex
def submit(self):
    """Flush the accumulated annotations into the task, then clear the buffer."""
    if self.is_empty():
        return
    serializer = LabeledDataSerializer(data=self.data)
    if serializer.is_valid(raise_exception=True):
        dm.task.patch_task_data(self.task_id, serializer.data, "create")
    self.reset()
def annotations(self, request, pk):
    """Dispatch GET/PUT/DELETE/PATCH requests for a task's annotations."""
    method = request.method
    if method == 'GET':
        task_data = annotation.get_task_data(pk, request.user)
        out = LabeledDataSerializer(data=task_data)
        if out.is_valid(raise_exception=True):
            return Response(out.data)
    elif method == 'PUT':
        incoming = LabeledDataSerializer(data=request.data)
        if incoming.is_valid(raise_exception=True):
            saved = annotation.put_task_data(pk, request.user, incoming.data)
            return Response(saved)
    elif method == 'DELETE':
        annotation.delete_task_data(pk, request.user)
        return Response(status=status.HTTP_204_NO_CONTENT)
    elif method == 'PATCH':
        # PATCH requires an explicit action (e.g. create/update/delete).
        action = self.request.query_params.get("action", None)
        if action not in annotation.PatchAction.values():
            raise serializers.ValidationError(
                "Please specify a correct 'action' for the request")
        incoming = LabeledDataSerializer(data=request.data)
        if incoming.is_valid(raise_exception=True):
            try:
                saved = annotation.patch_task_data(
                    pk, request.user, incoming.data, action)
            except (AttributeError, IntegrityError) as e:
                return Response(data=str(e),
                    status=status.HTTP_400_BAD_REQUEST)
            return Response(saved)
def annotations(self, request, pk):
    """Dispatch GET/PUT/DELETE/PATCH requests for a task's annotations,
    delegating PUT with a ``format`` query param to the upload proxy."""
    self.get_object()  # force to call check_object_permissions
    method = request.method
    if method == 'GET':
        task_data = annotation.get_task_data(pk, request.user)
        out = LabeledDataSerializer(data=task_data)
        if out.is_valid(raise_exception=True):
            return Response(out.data)
    elif method == 'PUT':
        if request.query_params.get("format", ""):
            # Formatted upload runs asynchronously through an rq job.
            return load_data_proxy(
                request=request,
                rq_id="{}@/api/v1/tasks/{}/annotations/upload".format(request.user, pk),
                rq_func=annotation.load_task_data,
                pk=pk,
            )
        incoming = LabeledDataSerializer(data=request.data)
        if incoming.is_valid(raise_exception=True):
            saved = annotation.put_task_data(pk, request.user, incoming.data)
            return Response(saved)
    elif method == 'DELETE':
        annotation.delete_task_data(pk, request.user)
        return Response(status=status.HTTP_204_NO_CONTENT)
    elif method == 'PATCH':
        # PATCH requires an explicit action (e.g. create/update/delete).
        action = self.request.query_params.get("action", None)
        if action not in annotation.PatchAction.values():
            raise serializers.ValidationError(
                "Please specify a correct 'action' for the request")
        incoming = LabeledDataSerializer(data=request.data)
        if incoming.is_valid(raise_exception=True):
            try:
                saved = annotation.patch_task_data(
                    pk, request.user, incoming.data, action)
            except (AttributeError, IntegrityError) as e:
                return Response(data=str(e),
                    status=status.HTTP_400_BAD_REQUEST)
            return Response(saved)
def run_inference_thread(tid, model_file, weights_file, labels_mapping, attributes, convertation_file, reset, user, restricted=True):
    """Run OpenVINO auto-annotation for task ``tid`` inside an rq worker.

    Args:
        tid: primary key of the task to annotate.
        model_file: path to the network topology (.xml) file.
        weights_file: path to the network weights (.bin) file.
        labels_mapping: mapping from model class ids to task label ids.
        attributes: attribute spec passed to the inference engine.
        convertation_file: interpretation script for the raw model output.
        reset: if True, replace existing annotations; otherwise append.
        user: user on whose behalf annotations are written.
        restricted: run the interpretation script in restricted mode.

    Raises:
        Exception: re-raises whatever the annotation pipeline raised,
            after logging it.
    """
    def update_progress(job, progress):
        # Report progress via job meta; returns False when the user has
        # requested cancellation (a "cancel" key placed into job.meta).
        job.refresh()
        if "cancel" in job.meta:
            del job.meta["cancel"]
            job.save()
            return False
        job.meta["progress"] = progress
        job.save_meta()
        return True

    try:
        job = rq.get_current_job()
        job.meta["progress"] = 0
        job.save_meta()
        db_task = TaskModel.objects.get(pk=tid)

        slogger.glob.info("auto annotation with openvino toolkit for task {}".format(tid))
        result = run_inference_engine_annotation(
            data=get_image_data(db_task.get_data_dirname()),
            model_file=model_file,
            weights_file=weights_file,
            labels_mapping=labels_mapping,
            attribute_spec=attributes,
            convertation_file=convertation_file,
            job=job,
            update_progress=update_progress,
            restricted=restricted
        )

        # A None result means the user cancelled the job mid-run.
        if result is None:
            slogger.glob.info("auto annotation for task {} canceled by user".format(tid))
            return

        serializer = LabeledDataSerializer(data=result)
        if serializer.is_valid(raise_exception=True):
            if reset:
                put_task_data(tid, user, result)
            else:
                patch_task_data(tid, user, result, "create")

        slogger.glob.info("auto annotation for task {} done".format(tid))
    except Exception as e:
        try:
            slogger.task[tid].exception("exception was occurred during auto annotation of the task", exc_info=True)
        except Exception as ex:
            # Task-specific logger may be unavailable; fall back to the
            # global logger. BUGFIX: the old code re-raised ``ex`` (the
            # logging failure) here, which masked the original error and
            # made the final ``raise e`` unreachable. Always re-raise the
            # original exception.
            slogger.glob.exception("exception was occurred during auto annotation of the task {}: {}".format(tid, str(ex)), exc_info=True)
        raise e
def serialize_annotations():
    """Collect validated, prepared annotations for every job of the task."""
    serialized = []
    for db_job in self._get_db_jobs():
        raw = dm.task.get_job_data(db_job.id)
        serializer = LabeledDataSerializer(data=raw)
        serializer.is_valid(raise_exception=True)
        serialized.append(
            self._prepare_annotations(serializer.data, self._label_mapping))
    return serialized
def annotations(self, request, pk):
    """Dispatch GET/PUT/DELETE/PATCH requests for a task's annotations.

    GET/PUT with a ``format`` query param delegate to the asynchronous
    export/import helpers instead of returning the raw data."""
    db_task = self.get_object()  # force to call check_object_permissions
    method = request.method
    if method == 'GET':
        format_name = request.query_params.get('format')
        if format_name:
            return _export_annotations(
                db_task=db_task,
                rq_id="/api/v1/tasks/{}/annotations/{}".format(
                    pk, format_name),
                request=request,
                action=request.query_params.get("action", "").lower(),
                callback=dm.views.export_task_annotations,
                format_name=format_name,
                filename=request.query_params.get("filename", "").lower(),
            )
        task_data = dm.task.get_task_data(pk)
        out = LabeledDataSerializer(data=task_data)
        if out.is_valid(raise_exception=True):
            return Response(out.data)
    elif method == 'PUT':
        format_name = request.query_params.get('format')
        if format_name:
            return _import_annotations(
                request=request,
                rq_id="{}@/api/v1/tasks/{}/annotations/upload".format(
                    request.user, pk),
                rq_func=dm.task.import_task_annotations,
                pk=pk,
                format_name=format_name,
            )
        incoming = LabeledDataSerializer(data=request.data)
        if incoming.is_valid(raise_exception=True):
            saved = dm.task.put_task_data(pk, incoming.data)
            return Response(saved)
    elif method == 'DELETE':
        dm.task.delete_task_data(pk)
        return Response(status=status.HTTP_204_NO_CONTENT)
    elif method == 'PATCH':
        # PATCH requires an explicit action (e.g. create/update/delete).
        action = self.request.query_params.get("action", None)
        if action not in dm.task.PatchAction.values():
            raise serializers.ValidationError(
                "Please specify a correct 'action' for the request")
        incoming = LabeledDataSerializer(data=request.data)
        if incoming.is_valid(raise_exception=True):
            try:
                saved = dm.task.patch_task_data(pk, incoming.data, action)
            except (AttributeError, IntegrityError) as e:
                return Response(data=str(e),
                    status=status.HTTP_400_BAD_REQUEST)
            return Response(saved)
def _create_annotations(self, db_job, annotations):
    """Prepare, validate, and write *annotations* into *db_job*."""
    self._prepare_annotations(annotations, self._labels_mapping)
    validated = LabeledDataSerializer(data=annotations)
    validated.is_valid(raise_exception=True)
    dm.task.put_job_data(db_job.id, validated.data)
def serialize(self):
    """Return the validated, serialized form of ``self.data``.

    Raises a validation error (via the serializer) if the data is invalid.
    """
    validated = LabeledDataSerializer(data=self.data)
    if validated.is_valid(raise_exception=True):
        return validated.data
def _call_reid(function, db_task, quality, threshold, max_distance):
    # Link per-frame rectangle shapes into tracks: for each pair of
    # consecutive frames, ask the re-identification lambda `function`
    # which boxes match, chain matched boxes into "paths", and finally
    # replace the matched rectangle shapes with track objects.
    data = dm.task.get_task_data(db_task.id)
    # Bucket rectangle shapes by frame; every other shape type is kept
    # aside untouched and written back at the end.
    boxes_by_frame = [[] for _ in range(db_task.data.size)]
    shapes_without_boxes = []
    for shape in data["shapes"]:
        if shape["type"] == str(ShapeType.RECTANGLE):
            boxes_by_frame[shape["frame"]].append(shape)
        else:
            shapes_without_boxes.append(shape)

    paths = {}  # path_id -> ordered list of box dicts forming one track
    for frame in range(db_task.data.size - 1):
        boxes0 = boxes_by_frame[frame]
        for box in boxes0:
            if "path_id" not in box:
                # Box was not matched from an earlier frame: start a new path.
                path_id = len(paths)
                paths[path_id] = [box]
                box["path_id"] = path_id

        boxes1 = boxes_by_frame[frame + 1]
        if boxes0 and boxes1:
            # matching[i] is the index in boxes1 matched to boxes0[i],
            # or a negative value when there is no match.
            matching = function.invoke(db_task, data={
                "frame0": frame,
                "frame1": frame + 1,
                "quality": quality,
                "boxes0": boxes0,
                "boxes1": boxes1,
                "threshold": threshold,
                "max_distance": max_distance
            })
            for idx0, idx1 in enumerate(matching):
                if idx1 >= 0:
                    # Extend the existing path with the matched next-frame box.
                    path_id = boxes0[idx0]["path_id"]
                    boxes1[idx1]["path_id"] = path_id
                    paths[path_id].append(boxes1[idx1])

        # Progress is reported per processed frame pair; a False return
        # means the job was cancelled.
        progress = (frame + 2) / db_task.data.size
        if not LambdaJob._update_progress(progress):
            break

    # Boxes on the last frame that were never matched still need paths.
    for box in boxes_by_frame[db_task.data.size - 1]:
        if "path_id" not in box:
            path_id = len(paths)
            paths[path_id] = [box]
            box["path_id"] = path_id

    tracks = []
    for path_id in paths:
        # The first box of the path supplies the track-level attributes.
        box0 = paths[path_id][0]
        tracks.append({
            "label_id": box0["label_id"],
            "group": None,
            "attributes": [],
            "frame": box0["frame"],
            "shapes": paths[path_id],
            "source": str(SourceType.AUTO)
        })

        # Strip per-shape fields that now live on the track. Note this
        # mutates the same dicts referenced by "shapes" above, so it must
        # run after the track dict has been built from box0.
        for box in tracks[-1]["shapes"]:
            box.pop("id", None)
            box.pop("path_id")
            box.pop("group")
            box.pop("label_id")
            box.pop("source")
            box["outside"] = False
            box["attributes"] = []

    # Close every track that ends before the last frame with an "outside"
    # keyframe on the following frame.
    for track in tracks:
        if track["shapes"][-1]["frame"] != db_task.data.size - 1:
            box = track["shapes"][-1].copy()
            box["outside"] = True
            box["frame"] += 1
            track["shapes"].append(box)

    if tracks:
        # Rectangles were consumed into tracks; keep only the other shapes.
        data["shapes"] = shapes_without_boxes
        data["tracks"].extend(tracks)

        serializer = LabeledDataSerializer(data=data)
        if serializer.is_valid(raise_exception=True):
            dm.task.put_task_data(db_task.id, serializer.data)
def main():
    # CLI entry point: validate the input files, run OpenVINO inference on
    # the provided images (or on a single blank test image), then
    # optionally serialize-check and/or display the detection results.
    kwargs = _get_kwargs()

    py_file = kwargs['py']
    bin_file = kwargs['bin']
    mapping_file = kwargs['json']
    xml_file = kwargs['xml']

    # Bail out early with a clear message for any missing input file.
    if not os.path.isfile(py_file):
        logging.critical('Py file not found! Check the path')
        return
    if not os.path.isfile(bin_file):
        logging.critical('Bin file is not found! Check path!')
        return
    if not os.path.isfile(xml_file):
        logging.critical('XML File not found! Check path!')
        return
    if not os.path.isfile(mapping_file):
        logging.critical('JSON file is not found! Check path!')
        return

    with open(mapping_file) as json_file:
        try:
            mapping = json.load(json_file)
        except json.decoder.JSONDecodeError:
            logging.critical('JSON file not able to be parsed! Check file')
            return

    try:
        mapping = mapping['label_map']
    except KeyError:
        logging.critical("JSON Mapping file must contain key `label_map`!")
        logging.critical("Exiting")
        return

    # JSON object keys are strings; model class ids must be ints.
    mapping = {int(k): v for k, v in mapping.items()}

    restricted = kwargs['restricted']
    image_files = kwargs.get('image_files')

    if image_files:
        image_data = [cv2.imread(f) for f in image_files]
    else:
        # No images supplied: fall back to one blank white frame.
        test_image = np.ones((1024, 1980, 3), np.uint8) * 255
        image_data = [test_image,]

    attribute_spec = {}
    results = run_inference_engine_annotation(image_data,
                                              xml_file,
                                              bin_file,
                                              mapping,
                                              attribute_spec,
                                              py_file,
                                              restricted=restricted)

    if kwargs['serialize']:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'cvat.settings.production'
        import django
        django.setup()
        from cvat.apps.engine.serializers import LabeledDataSerializer
        # NOTE: We're actually using `run_inference_engine_annotation`
        # incorrectly here. The `mapping` dict is supposed to be a mapping
        # of integers -> integers and represents the transition from model
        # integers to the labels in the database. We're using a mapping of
        # integers -> strings. For testing purposes, this shortcut is fine.
        # We just want to make sure everything works. Until, that is....
        # we want to test using the label serializer. Then we have to transition
        # back to integers, otherwise the serializer complains about have a string
        # where an integer is expected. We'll just brute force that.
        for shape in results['shapes']:
            # Change the english label to an integer for serialization validation
            shape['label_id'] = 1
        serializer = LabeledDataSerializer(data=results)
        if not serializer.is_valid():
            logging.critical('Data unable to be serialized correctly!')
            serializer.is_valid(raise_exception=True)

    logging.warning('Program didn\'t have any errors.')

    show_images = kwargs.get('show_images', False)
    if show_images:
        if image_files is None:
            logging.critical("Warning, no images provided!")
            logging.critical('Exiting without presenting results')
            return

        if not results['shapes']:
            logging.warning(str(results))
            logging.critical("No objects detected!")
            return

        show_image_delay = kwargs['show_image_delay']
        # Draw each frame's detections on its image and display it.
        for index, data in enumerate(image_data):
            for detection in results['shapes']:
                if not detection['frame'] == index:
                    continue
                points = detection['points']
                # Cv2 doesn't like floats for drawing
                points = [int(p) for p in points]
                color = random_color()
                if detection['type'] == 'rectangle':
                    cv2.rectangle(data, (points[0], points[1]),
                                  (points[2], points[3]), color, 3)
                elif detection['type'] in ('polygon', 'polyline'):
                    # polylines is picky about datatypes
                    points = pairwise(points)
                    cv2.polylines(data, [points], 1, color)
            cv2.imshow(str(index), data)
            cv2.waitKey(show_image_delay)
            cv2.destroyWindow(str(index))
def main():
    # CLI entry point: resolve the model files either from CLI flags or
    # (inside docker) from a deployed model name + task id, validate them,
    # run OpenVINO inference, optionally display the detections, and
    # optionally validate the results with the CVAT serializer.
    kwargs = _get_kwargs()

    py_file = kwargs.get('py')
    bin_file = kwargs.get('bin')
    # NOTE(review): abspath(None) would raise if --json is omitted outside
    # docker mode before the explicit checks below run — verify intent.
    mapping_file = os.path.abspath(kwargs.get('json'))
    xml_file = kwargs.get('xml')

    model_name = kwargs.get('model_name')
    task_id = kwargs.get('task_id')

    is_docker = model_name and task_id

    # xor is `exclusive or`. English is: if one or the other but not both
    if xor(bool(model_name), bool(task_id)):
        logging.critical('Must provide both `--model-name` and `--task-id` together!')
        return

    if is_docker:
        # Docker mode: locate all files from the deployed model/task.
        files = _get_docker_files(model_name, task_id)
        py_file = files[0]
        mapping_file = files[1]
        bin_file = files[2]
        xml_file = files[3]
        image_files = files[4]
    else:
        # Standalone mode: all four files must come from CLI flags.
        # Report every missing flag before returning.
        return_ = False
        if not py_file:
            logging.critical('Must provide --py file!')
            return_ = True
        if not bin_file:
            logging.critical('Must provide --bin file!')
            return_ = True
        if not xml_file:
            logging.critical('Must provide --xml file!')
            return_ = True
        if not mapping_file:
            logging.critical('Must provide --json file!')
            return_ = True
        if return_:
            return

    # Bail out early with a clear message for any missing input file.
    if not os.path.isfile(py_file):
        logging.critical('Py file not found! Check the path')
        return
    if not os.path.isfile(bin_file):
        logging.critical('Bin file is not found! Check path!')
        return
    if not os.path.isfile(xml_file):
        logging.critical('XML File not found! Check path!')
        return
    if not os.path.isfile(mapping_file):
        logging.critical('JSON file is not found! Check path!')
        return

    with open(mapping_file) as json_file:
        try:
            mapping = json.load(json_file)
        except json.decoder.JSONDecodeError:
            logging.critical('JSON file not able to be parsed! Check file')
            return

    try:
        mapping = mapping['label_map']
    except KeyError:
        logging.critical("JSON Mapping file must contain key `label_map`!")
        logging.critical("Exiting")
        return

    # JSON object keys are strings; model class ids must be ints.
    mapping = {int(k): v for k, v in mapping.items()}

    restricted = kwargs['restricted']

    if not is_docker:
        image_files = kwargs.get('image_files')

    if image_files:
        image_data = [cv2.imread(f) for f in image_files]
    else:
        # No images supplied: fall back to one blank white frame.
        test_image = np.ones((1024, 1980, 3), np.uint8) * 255
        image_data = [test_image,]

    attribute_spec = {}
    results = run_inference_engine_annotation(image_data,
                                              xml_file,
                                              bin_file,
                                              mapping,
                                              attribute_spec,
                                              py_file,
                                              restricted=restricted)

    logging.warning('Inference didn\'t have any errors.')

    show_images = kwargs.get('show_images', False)
    if show_images:
        if image_files is None:
            logging.critical("Warning, no images provided!")
            logging.critical('Exiting without presenting results')
            return

        if not results['shapes']:
            logging.warning(str(results))
            logging.critical("No objects detected!")
            return

        show_image_delay = kwargs['show_image_delay']
        show_labels = kwargs.get('show_labels')
        # Draw each frame's detections (and, optionally, labels) on its
        # image and display it.
        for index, data in enumerate(image_data):
            for detection in results['shapes']:
                if not detection['frame'] == index:
                    continue
                points = detection['points']
                label_str = detection['label_id']
                # Cv2 doesn't like floats for drawing
                points = [int(p) for p in points]
                color = random_color()
                if detection['type'] == 'rectangle':
                    cv2.rectangle(data, (points[0], points[1]),
                                  (points[2], points[3]), color, 3)
                    if show_labels:
                        cv2.putText(data, label_str,
                                    (points[0], points[1] - 7),
                                    cv2.FONT_HERSHEY_COMPLEX, 0.6, color, 1)
                elif detection['type'] in ('polygon', 'polyline'):
                    # polylines is picky about datatypes
                    points = pairwise(points)
                    cv2.polylines(data, [points], 1, color)
                    if show_labels:
                        min_point = find_min_y(points)
                        cv2.putText(data, label_str,
                                    (min_point[0], min_point[1] - 7),
                                    cv2.FONT_HERSHEY_COMPLEX, 0.6, color, 1)
            cv2.imshow(str(index), data)
            cv2.waitKey(show_image_delay)
            cv2.destroyWindow(str(index))

    if kwargs['serialize']:
        _init_django('cvat.settings.production')
        from cvat.apps.engine.serializers import LabeledDataSerializer
        # NOTE: We're actually using `run_inference_engine_annotation`
        # incorrectly here. The `mapping` dict is supposed to be a mapping
        # of integers -> integers and represents the transition from model
        # integers to the labels in the database. We're using a mapping of
        # integers -> strings. For testing purposes, this shortcut is fine.
        # We just want to make sure everything works. Until, that is....
        # we want to test using the label serializer. Then we have to transition
        # back to integers, otherwise the serializer complains about have a string
        # where an integer is expected. We'll just brute force that.
        for shape in results['shapes']:
            # Change the english label to an integer for serialization validation
            shape['label_id'] = 1
        serializer = LabeledDataSerializer(data=results)
        if not serializer.is_valid():
            logging.critical('Data unable to be serialized correctly!')
            serializer.is_valid(raise_exception=True)