def detect_from_image(
    self, image: np.ndarray, output_image: bool = False
) -> ObjectDetectionRawResult:
    """Run the object-detection model served by TF Serving on `image`.

    :param image: the input image as a numpy array
    :param output_image: when True, draw the detected boxes and labels
        onto the resized image array as a side effect
    :return: the raw detection result
    """
    # Downscale before sending so the JSON request payload stays small.
    pixels = convert_image_to_array(
        resize_image(image, settings.OBJECT_DETECTION_IMAGE_MAX_SIZE)
    )
    payload = {
        "signature_name": "serving_default",
        "instances": np.expand_dims(pixels, 0).tolist(),
    }
    response = http_session.post(
        "{}/{}:predict".format(settings.TF_SERVING_BASE_URL, self.name),
        json=payload,
    )
    response.raise_for_status()
    # One image in, one prediction out.
    prediction = response.json()["predictions"][0]
    result = ObjectDetectionRawResult(
        num_detections=int(prediction["num_detections"]),
        detection_classes=np.array(prediction["detection_classes"], dtype=np.uint8),
        detection_boxes=np.array(prediction["detection_boxes"]),
        detection_scores=np.array(prediction["detection_scores"]),
        detection_masks=None,
        category_index=self.category_index,
    )
    if output_image:
        add_boxes_and_labels(pixels, result)
    return result
def add_logos_to_ann(image: ImageModel, logos: List[LogoAnnotation]) -> int:
    """Index `logos` in the approximate-nearest-neighbor (ANN) service.

    :param image: the image the logos were detected on
    :param logos: the logo annotations to index
    :return: the number of logos added, or 0 on empty input or HTTP failure
    """
    if not logos:
        return 0

    image_url = settings.OFF_IMAGE_BASE_URL + image.source_image
    data = {
        "image_url": image_url,
        "logos": [
            {"bounding_box": logo.bounding_box, "id": logo.id} for logo in logos
        ],
    }
    r = http_session.post(
        settings.BaseURLProvider().robotoff().get() + "/api/v1/ann/add",
        json=data,
        timeout=30,
    )

    if not r.ok:
        # Bug fix: the original mixed an f-string (which formatted the
        # status code eagerly) with a lazy %s argument. Use fully lazy
        # %-style formatting, the logging-module convention.
        logger.warning(
            "error while adding image to ANN (%s): %s", r.status_code, r.text
        )
        return 0

    return r.json()["added"]
def save_insight(insight_id: str, annotation: int):
    """Record an annotation for an insight via the annotation endpoint.

    :param insight_id: identifier of the insight being annotated
    :param annotation: the annotation value, serialized as a string
    :return: the decoded JSON response
    """
    response = http_session.post(
        ANNOTATE_INSIGHT_URL,
        data={
            "insight_id": insight_id,
            "annotation": str(annotation),
        },
    )
    return response.json()
def select_rotate_image(
    barcode: str,
    image_id: str,
    image_key: Optional[str] = None,
    rotate: Optional[int] = None,
    server_domain: Optional[str] = None,
    auth: Optional[OFFAuthentication] = None,
    timeout: Optional[int] = 15,
):
    """Select (and optionally rotate) a product image on the OFF server.

    :param barcode: the product barcode
    :param image_id: the raw image id to select
    :param image_key: the selected-image key (e.g. front_fr), if any
    :param rotate: rotation angle; must be one of 90, 180 or 270
    :param server_domain: target server domain; defaults to the configured one
    :param auth: user credentials or a session cookie
    :param timeout: request timeout in seconds
    :raises ValueError: on an invalid rotation angle, or when neither a
        password nor a session cookie is available
    :return: the HTTP response (raise_for_status already checked)
    """
    if server_domain is None:
        server_domain = settings.OFF_SERVER_DOMAIN

    url = get_product_image_select_url(server_domain)
    cookies = None
    params = {
        "code": barcode,
        "imgid": image_id,
    }

    if rotate is not None:
        if rotate not in (90, 180, 270):
            raise ValueError("invalid value for rotation angle: {}".format(rotate))
        params["angle"] = str(rotate)

    if image_key is not None:
        params["id"] = image_key

    if auth is None:
        # No explicit auth: fall back to the globally configured credentials.
        params.update(settings.off_credentials())
    elif auth.session_cookie:
        cookies = {
            "session": auth.session_cookie,
        }
    elif auth.username and auth.password:
        params["user_id"] = auth.username
        params["password"] = auth.password

    if cookies is None and not params.get("password"):
        raise ValueError(
            "a password or a session cookie is required to select an image"
        )

    # The dev environment is protected by HTTP basic auth.
    request_auth: Optional[Tuple[str, str]] = None
    if server_domain.endswith("openfoodfacts.net"):
        request_auth = ("off", "off")

    r = http_session.post(
        url, data=params, auth=request_auth, cookies=cookies, timeout=timeout
    )
    r.raise_for_status()
    return r
def _post_message(
    text: str, channel: str, attachments: Optional[List[JSONType]] = None, **kwargs
):
    """Post a plain-text message to a Slack channel.

    :param text: the message text
    :param channel: the target Slack channel
    :param attachments: optional message attachments
    :param kwargs: extra parameters forwarded to the Slack API
    :return: the parsed Slack JSON response
    """
    raise_if_slack_token_undefined()

    payload: JSONType = {
        **get_base_params(),
        "channel": channel,
        "text": text,
        **kwargs,
    }
    if attachments:
        payload["attachments"] = attachments

    response = http_session.post(POST_MESSAGE_URL, data=payload)
    return get_slack_json(response)
def _post_message(
    self,
    blocks: List[Dict],
    channel: str,
    **kwargs,
):
    """Post a block-based message to Slack, best-effort.

    Notification delivery must never break the caller: any exception is
    logged and None is returned instead of propagating.

    :param blocks: Slack Block Kit blocks describing the message
    :param channel: the target Slack channel
    :param kwargs: extra parameters forwarded to the Slack API
    :return: the parsed Slack JSON response, or None on error
    """
    try:
        payload: JSONType = {
            **(self._get_base_params()),
            "channel": channel,
            "blocks": json.dumps(blocks),
            **kwargs,
        }
        response = http_session.post(self.POST_MESSAGE_URL, data=payload)
        return _get_slack_json(response)
    except Exception as e:
        logger.error(
            "An exception occurred when sending a Slack notification", exc_info=e
        )
def predict_from_barcode(
    self, barcode: str
) -> Optional[List[CategoryPrediction]]:
    """Predict categories for the product identified by `barcode`.

    Lazily loads the model on first use, fetches the product, then queries
    the TF Serving endpoint.

    :param barcode: the product barcode
    :return: the raw TF Serving JSON response, or None when the product
        is not found
    """
    if not self.loaded:
        self.load()

    product = get_product(barcode, fields=["product_name", "ingredients_tags"])
    if product is None:
        logger.info("Product {} not found".format(barcode))
        return None

    # Single-product batch: serialize both feature tensors to lists.
    features = self.get_input_from_products([product])[0]
    features = [features[0].tolist(), features[1].tolist()]
    payload = {"signature_name": "serving_default", "instances": [features]}

    r = http_session.post(
        "{}/{}:predict".format(settings.TF_SERVING_BASE_URL, self.NAME),
        json=payload,
    )
    r.raise_for_status()
    return r.json()
def predict(self, product: Dict, deepest_only: bool = False) -> List[Prediction]:
    """Return an unordered list of category predictions for `product`.

    :param product: the product document; must contain a non-empty
        "product_name" (the model was trained on named products)
    :param deepest_only: when True, only keep the deepmost categories of a
        predicted taxonomy chain — e.g. if 'fresh vegetables' -> 'legumes'
        -> 'beans' is predicted, only ['beans'] is returned.
    """
    # The model was trained on products that have a name.
    if not product.get("product_name"):
        return []

    # Ingredients are optional; just ensure the field has the right type.
    product.setdefault("ingredients_tags", [])

    data = {
        "signature_name": "serving_default",
        "instances": [
            {
                "ingredient": product["ingredients_tags"],
                "product_name": [product["product_name"]],
            }
        ],
    }
    r = http_session.post(
        f"{settings.TF_SERVING_BASE_URL}/category-classifier:predict", json=data
    )
    r.raise_for_status()
    response = r.json()

    # One product was sent, so TF Serving returns exactly one prediction,
    # shaped as:
    # "predictions": [
    #     {
    #         "output_mapper_layer": [0.868871808, 0.801418602, ...],
    #         "output_mapper_layer_1": ["en:seafood", "en:fishes", ...],
    #     }
    # ]
    # 'output_mapper_layer' holds confidence scores in descending order and
    # 'output_mapper_layer_1' the matching category for each score. The
    # model returns at most the top 50 predictions.
    prediction = response["predictions"][0]
    scores = prediction["output_mapper_layer"]
    labels = prediction["output_mapper_layer_1"]

    # Scores are sorted descending, so stop at the first one below 0.5.
    category_predictions = []
    for idx, confidence in enumerate(scores):
        if confidence < 0.5:
            break
        category_predictions.append(
            CategoryPrediction(category=labels[idx], confidence=confidence)
        )

    if deepest_only:
        by_category = {p.category: p for p in category_predictions}
        nodes = [self.taxonomy[p.category] for p in category_predictions]
        category_predictions = [
            by_category[node.id]
            for node in self.taxonomy.find_deepest_nodes(nodes)
        ]

    return [
        category_prediction.to_prediction()
        for category_prediction in category_predictions
    ]