def add_pair(self, ann_gt, ann_pred):
    """Accumulate match statistics for one (ground-truth, prediction) annotation pair.

    Labels on both annotations are filtered by the configured class mappings.
    Predictions without a confidence tag are skipped (with a warning); the rest
    are matched to ground truth by IoU. Matched predictions are recorded as
    correct/incorrect per ground-truth class, unmatched predictions are recorded
    as false positives, and the ground-truth totals are updated.

    :param ann_gt: ground-truth annotation (provides .labels and .img_size)
    :param ann_pred: predicted annotation (provides .labels)
    """
    labels_gt = filter_labels_by_name(ann_gt.labels, self._gt_to_pred_class_mapping)
    # Original wrapped this call in an identity comprehension; list() is enough.
    all_labels_pred = list(filter_labels_by_name(ann_pred.labels, self._pred_to_gt_class_mapping))
    labels_pred = []
    for label in all_labels_pred:
        label_confidence = self._get_confidence_value(label)
        if label_confidence is None:
            logger.warn(f'Found a label with class {label.obj_class.name!r} that does not have a '
                        f'{self._confidence_tag_name!r} tag attached. Skipping this object for metric computation.')
        else:
            labels_pred.append(label)

    match_result = match_labels_by_iou(labels_1=labels_gt, labels_2=labels_pred,
                                       img_size=ann_gt.img_size, iou_threshold=self._iou_threshold)
    for match in match_result.matches:
        gt_class = match.label_1.obj_class.name
        label_pred = match.label_2
        # A match is correct only when the predicted class is the mapped GT class.
        self._counters[gt_class][MATCHES].append(
            MatchWithConfidence(
                is_correct=(label_pred.obj_class.name == self._gt_to_pred_class_mapping[gt_class]),
                confidence=self._get_confidence_value(label_pred)))

    # Add unmatched predictions to the list as false positive matches.
    for unmatched_pred in match_result.unmatched_labels_2:
        gt_class = self._pred_to_gt_class_mapping[unmatched_pred.obj_class.name]
        self._counters[gt_class][MATCHES].append(
            MatchWithConfidence(is_correct=False,
                                confidence=self._get_confidence_value(unmatched_pred)))

    for label_gt in labels_gt:
        self._counters[label_gt.obj_class.name][TOTAL_GROUND_TRUTH] += 1
def _upload_uniq_images_single_req(self, func_item_to_byte_stream, hashes_items_to_upload):
    """ Upload images (binary data) to server with single request.
        Expects unique images that aren't exist at server.
    :param func_item_to_byte_stream: converter for "item" to byte stream
    :param hashes_items_to_upload: list of pairs (hash, item)
    :return: list of hashes for successfully uploaded items
    """
    # One multipart field per item, keyed "<idx>-file".
    fields = {}
    for idx, (_, item) in enumerate(hashes_items_to_upload):
        fields["{}-file".format(idx)] = (str(idx), func_item_to_byte_stream(item), 'image/*')

    response = self._api.post('images.bulk.upload', MultipartEncoder(fields=fields))
    response_items = json.loads(response.text)
    remote_hashes = [entry['hash'] for entry in response_items if 'hash' in entry]

    # Fewer hashes back than items sent means some uploads failed;
    # log the per-item errors but still return the successful hashes.
    if len(remote_hashes) != len(hashes_items_to_upload):
        problem_items = []
        for (hsh, item), entry in zip(hashes_items_to_upload, response_items):
            if entry.get('errors'):
                problem_items.append((hsh, item, entry['errors']))
        logger.warn('Not all images were uploaded within request.', extra={
            'total_cnt': len(hashes_items_to_upload),
            'ok_cnt': len(remote_hashes),
            'items': problem_items
        })
    return remote_hashes
def crop(self, rect):
    '''
    Crop the current Polygon with a given rectangle.
    :param rect: Rectangle class object
    :return: list of Polygon class objects produced by the intersection
    :raises: re-raises any shapely exception after logging a warning
    '''
    from supervisely_lib.geometry.point_location import PointLocation
    try:
        # Build the clipping window explicitly with an inclusive right/bottom
        # border (+1): rect.corners misses the last pixel row and column
        # (known 1 pixel error at the right-bottom corner).
        clip_points = [
            PointLocation(row=rect.top, col=rect.left),
            PointLocation(row=rect.top, col=rect.right + 1),
            PointLocation(row=rect.bottom + 1, col=rect.right + 1),
            PointLocation(row=rect.bottom + 1, col=rect.left)
        ]
        clipping_window_shpl = ShapelyPolygon(points_to_row_col_list(clip_points))
        self_shpl = ShapelyPolygon(self.exterior_np, holes=self.interior_np)
        # buffer(0) repairs self-intersecting geometry before intersecting.
        intersections_shpl = self_shpl.buffer(0).intersection(clipping_window_shpl)
        mapping_shpl = mapping(intersections_shpl)
    except Exception:
        logger.warn('Polygon cropping exception, shapely.', exc_info=False)
        raise

    intersections = shapely_figure_to_coords_list(mapping_shpl)

    # Check for bad cropping cases (e.g. empty points list).
    out_polygons = []
    for intersection in intersections:
        if isinstance(intersection, list) and len(intersection) > 0 and len(intersection[0]) >= 3:
            exterior = row_col_list_to_points(intersection[0], do_round=True)
            # Holes with fewer than 3 points cannot form a valid contour.
            interiors = [row_col_list_to_points(contour, do_round=True)
                         for contour in intersection[1:] if len(contour) > 2]
            out_polygons.append(Polygon(exterior, interiors))
    return out_polygons
def _upload_data_bulk(self, func_item_to_byte_stream, items_hashes, retry_cnt=3, progress_cb=None):
    """ Upload videos (binary data) to server. Works with already existing or duplicating videos.
    :param func_item_to_byte_stream: converter for "item" to byte stream
    :param items_hashes: iterable of pairs (item, hash) where "item" is a some descriptor
     (e.g. video file path) for video data, and "hash" is a hash for the video binary data
    :param retry_cnt: int, number of retries to send the whole set of items
    :param progress_cb: callback to account progress (in number of items)
    """
    hash_to_items = {i_hash: item for item, i_hash in items_hashes}
    unique_hashes = set(hash_to_items.keys())
    remote_hashes = set(self.check_existing_hashes(list(unique_hashes)))  # existing -- from server
    if progress_cb:
        progress_cb(len(remote_hashes))
    pending_hashes = unique_hashes - remote_hashes
    for retry_idx in range(retry_cnt):
        # single attempt to upload all data which is not uploaded yet
        for hashes in batched(list(pending_hashes)):
            pending_hashes_items = [(h, hash_to_items[h]) for h in hashes]
            hashes_rcv = self._upload_uniq_videos_single_req(func_item_to_byte_stream, pending_hashes_items)
            pending_hashes -= set(hashes_rcv)
            if set(hashes_rcv) - set(hashes):
                # Messages fixed: this path uploads videos, not images.
                logger.warn('Hash inconsistency in videos bulk upload.',
                            extra={'sent': hashes, 'received': hashes_rcv})
            if progress_cb:
                progress_cb(len(hashes_rcv))
        if not pending_hashes:
            return
        logger.warn('Unable to upload videos (data).', extra={
            'retry_idx': retry_idx,
            'items': [(h, hash_to_items[h]) for h in pending_hashes]
        })
        # now retry it for the case if it is a shadow server/connection error

    raise RuntimeError("Unable to upload videos (data). "
                       "Please check if videos are in supported format and if ones aren't corrupted.")
def post(self, method, data, retries=3, stream=False):
    """ Send a POST request to the public API, retrying on retriable HTTP errors.
    :param method: str, API method name (e.g. 'images.bulk.upload')
    :param data: bytes payload, MultipartEncoder/MultipartEncoderMonitor, or a
     JSON-serializable object (dicts are merged with self.additional_fields)
    :param retries: int, maximum number of attempts
    :param stream: bool, passed through to requests for streamed responses
    :return: requests.Response on success
    :raises RuntimeError: when the request ultimately fails
    """
    for retry_idx in range(retries):
        try:
            # Join URL parts with '/' explicitly: os.path.join would emit
            # backslashes on Windows and produce an invalid URL.
            url = '/'.join((self.server_address.rstrip('/'), 'public/api/v3', method))
            if isinstance(data, bytes):
                response = requests.post(url, data=data, headers=self.headers, stream=stream)
            elif isinstance(data, (MultipartEncoderMonitor, MultipartEncoder)):
                response = requests.post(url, data=data,
                                         headers={**self.headers, 'Content-Type': data.content_type},
                                         stream=stream)
            else:
                json_body = data
                if isinstance(data, dict):
                    json_body = {**data, **self.additional_fields}
                response = requests.post(url, json=json_body, headers=self.headers, stream=stream)

            if response.status_code != requests.codes.ok:
                Api._raise_for_status(response)
            return response
        except requests.RequestException as exc:
            exc_str = str(exc)
            logger.warn('A request to the server has failed.', exc_info=True, extra={'exc_str': exc_str})
            # NOTE(review): only HTTP errors with retriable status codes are
            # retried; connection-level errors fail immediately — confirm this
            # is the intended policy.
            if (isinstance(exc, requests.exceptions.HTTPError)
                    and hasattr(exc, 'response')
                    and exc.response.status_code in RETRY_STATUS_CODES
                    and retry_idx < retries - 1):
                # (retry_idx + 2): one for change the counting base from 0 to 1,
                # and 1 for indexing the next iteration.
                logger.warn('Retrying failed request ({}/{}).'.format(retry_idx + 2, retries))
            else:
                raise RuntimeError(
                    'Request has failed. This may be due to connection problems or invalid requests. '
                    'Last failure: {!r}'.format(exc_str))
def _upload_data_bulk(self, func_item_to_byte_stream, items_hashes, retry_cnt=3, progress_cb=None):
    """ Upload images (binary data) to server. Works with already existing or duplicating images.
    :param func_item_to_byte_stream: converter for "item" to byte stream
    :param items_hashes: iterable of pairs (item, hash) where "item" is a some descriptor
     (e.g. image file path) for image data, and "hash" is a hash for the image binary data
    :param retry_cnt: int, number of retries to send the whole set of items
    :param progress_cb: callback to account progress (in number of items)
    """
    item_by_hash = {h: item for item, h in items_hashes}
    all_hashes = set(item_by_hash)

    # Hashes already known to the server need no upload; count them as done.
    already_remote = set(self.check_existing_hashes(list(all_hashes)))
    if progress_cb:
        progress_cb(len(already_remote))

    to_send = all_hashes - already_remote
    # @TODO: some correlation with sly.io.network_exceptions. Should we perform retries here?
    for retry_idx in range(retry_cnt):
        # One full pass over everything that is still pending.
        for batch_hashes in batched(list(to_send)):
            batch_pairs = [(h, item_by_hash[h]) for h in batch_hashes]
            received = self._upload_uniq_images_single_req(func_item_to_byte_stream, batch_pairs)
            to_send.difference_update(received)
            if set(received) - set(batch_hashes):
                logger.warn('Hash inconsistency in images bulk upload.', extra={
                    'sent': batch_hashes,
                    'received': received
                })
            if progress_cb:
                progress_cb(len(received))
        if not to_send:
            return
        logger.warn('Unable to upload images (data).', extra={
            'retry_idx': retry_idx,
            'items': [(h, item_by_hash[h]) for h in to_send]
        })
        # Retry in case the failure was a transient server/connection error.

    raise RuntimeError(
        "Unable to upload images (data). "
        "Please check if images are in supported format and if ones aren't corrupted."
    )
def _upload_uniq_videos_single_req(self, func_item_to_byte_stream, hashes_items_to_upload):
    """ Upload videos (binary data) to server with single request.
        Expects unique videos that are not yet present at the server.
    :param func_item_to_byte_stream: converter for "item" to byte stream
    :param hashes_items_to_upload: list of pairs (hash, item)
    :return: list of hashes for successfully uploaded items
    """
    content_dict = {}
    for idx, (_, item) in enumerate(hashes_items_to_upload):
        content_dict["{}-file".format(idx)] = (str(idx), func_item_to_byte_stream(item), 'video/*')
    encoder = MultipartEncoder(fields=content_dict)
    resp = self._api.post('videos.bulk.upload', encoder)
    resp_list = json.loads(resp.text)
    remote_hashes = [d['hash'] for d in resp_list if 'hash' in d]
    if len(remote_hashes) != len(hashes_items_to_upload):
        problem_items = [(hsh, item, resp['errors'])
                         for (hsh, item), resp in zip(hashes_items_to_upload, resp_list)
                         if resp.get('errors')]
        # Message fixed: this endpoint uploads videos, not images.
        logger.warn('Not all videos were uploaded within request.', extra={
            'total_cnt': len(hashes_items_to_upload),
            'ok_cnt': len(remote_hashes),
            'items': problem_items})
    return remote_hashes
def crop(self, rect):
    '''
    Crop the current Polygon with the given rectangle. If the polygon cannot
    be cropped (invalid geometry), a warning is logged and an empty list is
    returned instead of raising.
    :param rect: Rectangle class object
    :return: list of Polygon class objects
    '''
    from supervisely_lib.geometry.point_location import PointLocation
    try:
        # Inclusive clipping window: right/bottom borders shifted by +1.
        # (rect.corners was the old implementation with a 1 pixel error
        # at the right-bottom corner.)
        # @TODO: investigate here (critical issue)
        window = [
            PointLocation(row=rect.top, col=rect.left),
            PointLocation(row=rect.top, col=rect.right + 1),
            PointLocation(row=rect.bottom + 1, col=rect.right + 1),
            PointLocation(row=rect.bottom + 1, col=rect.left)
        ]
        window_shpl = ShapelyPolygon(points_to_row_col_list(window))
        me_shpl = ShapelyPolygon(self.exterior_np, holes=self.interior_np)
        result_shpl = me_shpl.buffer(0).intersection(window_shpl)
        result_mapping = mapping(result_shpl)
    except Exception:
        # If the polygon is invalid, just warn and skip it.
        # @TODO: need more investigation here
        logger.warn('Polygon cropping exception, shapely.', exc_info=True)
        return []

    out_polygons = []
    for coords in shapely_figure_to_coords_list(result_mapping):
        # Guard against degenerate results (e.g. empty point lists).
        if not isinstance(coords, list) or len(coords) == 0 or len(coords[0]) < 3:
            continue
        exterior = row_col_list_to_points(coords[0], do_round=True)
        interiors = [row_col_list_to_points(hole, do_round=True)
                     for hole in coords[1:] if len(hole) > 2]
        out_polygons.append(Polygon(exterior, interiors))
    return out_polygons
def __init__(self, exterior, interior, sly_id=None, class_id=None, labeler_login=None, updated_at=None, created_at=None):
    '''
    :param exterior: list of PointLocation objects, the object contour is defined with these points
    :param interior: list of elements that has the same structure like the "exterior" field.
     This is the list of polygons that define object holes.
    :raises ValueError: if the exterior or any interior element is empty
     (the original code crashed with an IndexError in that case)
    '''
    if len(exterior) < 3:
        if not exterior:
            raise ValueError('"{}" field must contain at least 1 point to create "Polygon" object.'.format(EXTERIOR))
        # Pad a copy to 3 points by repeating the last one instead of
        # mutating the caller's list in place.
        exterior = exterior + [exterior[-1]] * (3 - len(exterior))
        logger.warn('"{}" field must contain at least 3 points to create "Polygon" object.'.format(EXTERIOR))
        #raise ValueError('"{}" field must contain at least 3 points to create "Polygon" object.'.format(EXTERIOR))
    fixed_interior = []
    for element in interior:
        if len(element) < 3:
            if not element:
                raise ValueError('Interior element must contain at least 1 point to create "Polygon" object.')
            logger.warn('"{}" interior field must contain at least 3 points to create "Polygon" object.'.format(element))
            # Same padding fix as for the exterior: work on a copy.
            element = element + [element[-1]] * (3 - len(element))
        fixed_interior.append(element)
    #if any(len(element) < 3 for element in interior):
    #    raise ValueError('"{}" element must contain at least 3 points.'.format(INTERIOR))
    super().__init__(exterior, fixed_interior, sly_id=sly_id, class_id=class_id,
                     labeler_login=labeler_login, updated_at=updated_at, created_at=created_at)