def post(self, model_id, prediction_id):
    """
    Update per-inference validity flags on a single prediction tile
    ---
    produces:
        - application/json
    parameters:
        - in: path
          name: model_id
          description: ID of the Model
          required: true
          type: integer
        - in: path
          name: prediction_id
          description: ID of the Prediction
          required: true
          type: integer
    responses:
        200:
            description: Updated validity object for the tile
        400:
            description: Invalid Request
        404:
            description: Prediction tile not found
        500:
            description: Internal Server Error
    """
    try:
        payload = request.get_json()
        # Guard against a missing/non-JSON body as well as missing keys,
        # so malformed requests get a 400 instead of a 500.
        if payload is None or payload.get("id") is None or payload.get("validity") is None:
            return err(400, "id and validity keys must be present"), 400

        tile = PredictionTileService.get(payload["id"])
        if tile is None:
            return err(404, "prediction tile not found"), 404

        # Start from the tile's stored validity; it may be NULL in the DB.
        current = tile.validity
        if current is None:
            current = {}

        # Only inference names known to this prediction may be flagged,
        # and only with strict booleans — anything else is ignored.
        inferences = PredictionService.inferences(prediction_id)
        for inf in inferences:
            flag = payload["validity"].get(inf)
            if not isinstance(flag, bool):
                continue
            current[inf] = flag

        PredictionTileService.validity(payload["id"], current)
        return current, 200
    except Exception as e:
        error_msg = f'Unhandled error: {str(e)}'
        current_app.logger.error(error_msg)
        # BUG FIX: previously returned a bare tuple (500, error_msg) instead
        # of the err() envelope every other handler in this module uses.
        return err(500, error_msg), 500
def get(self, model_id, prediction_id):
    """
    TileJSON response for the predictions
    ---
    produces:
        - application/json
    parameters:
        - in: path
          name: model_id
          description: ID of the Model
          required: true
          type: integer
        - in: path
          name: prediction_id
          description: ID of the Prediction
          required: true
          type: integer
    responses:
        200:
            description: ID of the prediction
        400:
            description: Invalid Request
        404:
            description: Prediction TileJSON not found
        500:
            description: Internal Server Error
    """
    try:
        # Service builds the TileJSON document; flask-restful serializes it.
        return PredictionTileService.tilejson(model_id, prediction_id)
    except PredictionsNotFound:
        return err(404, "Prediction TileJSON not found"), 404
    except Exception as e:
        error_msg = f'Unhandled error: {str(e)}'
        current_app.logger.error(error_msg)
        return err(500, error_msg), 500
def post(self, project_id, prediction_id):
    """
    Import a file of GeoJSON inferences into the prediction

    Typically used to seed TFRecord creation preceding model creation
    ---
    produces:
        - application/json
    responses:
        200:
            description: ID of the prediction
        400:
            description: Invalid Request
        404:
            description: Predictions not found
        500:
            description: Internal Server Error
    """
    # Take the first uploaded file regardless of its form-field name.
    uploaded_keys = list(request.files.keys())
    if not uploaded_keys:
        return err(400, "File not found in request"), 400
    upload = request.files[uploaded_keys[0]]

    try:
        pred = PredictionService.get_prediction_by_id(prediction_id)

        # Buffer the upload in memory, then treat it as newline-delimited
        # GeoJSON: one feature per non-empty line.
        buffer = io.BytesIO()
        upload.save(buffer)
        lines = buffer.getvalue().decode("UTF-8").split("\n")
        features = [geojson.loads(line) for line in lines if line]

        PredictionTileService.create_geojson(pred, features)
        # NOTE(review): no explicit success return here (matches original
        # behavior) — flask-restful serializes the implicit None as null/200.
    except InvalidGeojson as e:
        return err(400, str(e)), 400
    except PredictionsNotFound:
        return err(404, "Predictions not found"), 404
    except Exception as e:
        current_app.logger.error(traceback.format_exc())
        error_msg = f"Unhandled error: {str(e)}"
        return err(500, error_msg), 500
def get(self, model_id):
    """
    Get aggregated prediction tile for a model
    within the supplied bbox and tile size
    ---
    produces:
        - application/json
    parameters:
        - in: path
          name: model_id
          description: ID of the Model
          required: true
          type: integer
        - in: query
          name: bbox
          description: bbox in the wsen format. Comma separated floats
          required: true
          type: string
        - in: query
          name: zoom
          description: zoom level for specifying aggregate tile size
          required: true
          type: integer
    responses:
        200:
            description: List of all predictions for the model within supplied bbox
        400:
            description: Invalid Request
        404:
            description: No predictions found
        500:
            description: Internal Server Error
    """
    try:
        bbox = request.args.get('bbox', '')
        zoom = request.args.get('zoom', '')
        # Defaults are '' so a plain truthiness check covers both the
        # missing and the empty-string cases.
        if not bbox:
            return {"error": 'A bbox is required'}, 400
        if not zoom:
            # BUG FIX: this return previously had no status code, so the
            # error body was sent with a 200.
            return {"error": 'Zoom level is required for aggregation'}, 400

        # check if this model exists (raises NotFound otherwise)
        ml_model_dto = MLModelService.get_ml_model_by_id(model_id)

        tiles = PredictionTileService.get_aggregated_tiles(
            ml_model_dto.model_id, bbox, zoom)
        return tiles, 200

    except NotFound:
        return {"error": "Model not found"}, 404
    except PredictionsNotFound:
        return {"error": "No predictions for this bbox"}, 404
    except ValueError as e:
        # Raised by the service for malformed bbox/zoom values.
        return {"error": str(e)}, 400
    except Exception as e:
        error_msg = f'Unhandled error: {str(e)}'
        current_app.logger.error(error_msg)
        return {"error": error_msg}, 500
def get(self, project_id, prediction_id, z, x, y):
    """
    Mapbox Vector Tile Response
    ---
    produces:
        - application/x-protobuf
    parameters:
        - in: path
          name: project_id
          description: ID of the Model
          required: true
          type: integer
        - in: path
          name: prediction_id
          description: ID of the Prediction
          required: true
          type: integer
        - in: path
          name: z
          description: zoom of the tile to fetch
          required: true
          type: integer
        - in: path
          name: y
          description: y coord of the tile to fetch
          required: true
          type: integer
        - in: path
          name: x
          description: x coord of the tile to fetch
          required: true
          type: integer
    responses:
        200:
            description: ID of the prediction
        400:
            description: Invalid Request
        404:
            description: Prediction tile not found
        500:
            description: Internal Server Error
    """
    try:
        # Fetch the pre-rendered vector tile and hand it back as raw
        # protobuf rather than letting flask-restful JSON-encode it.
        pbf = PredictionTileService.mvt(project_id, prediction_id, z, x, y)
        resp = make_response(pbf)
        resp.headers["content-type"] = "application/x-protobuf"
        return resp
    except PredictionsNotFound:
        return err(404, "Prediction tile not found"), 404
    except Exception as e:
        current_app.logger.error(traceback.format_exc())
        return err(500, f"Unhandled error: {str(e)}"), 500
def get(self, model_id, prediction_id, z, x, y):
    """
    TileJSON response for the predictions
    ---
    produces:
        - application/x-protobuf
    parameters:
        - in: path
          name: model_id
          description: ID of the Model
          required: true
          type: integer
        - in: path
          name: prediction_id
          description: ID of the Prediction
          required: true
          type: integer
        - in: path
          name: z
          description: zoom of the tile to fetch
          required: true
          type: integer
        - in: path
          name: y
          description: y coord of the tile to fetch
          required: true
          type: integer
        - in: path
          name: x
          description: x coord of the tile to fetch
          required: true
          type: integer
    responses:
        200:
            description: ID of the prediction
        400:
            description: Invalid Request
        404:
            description: Prediction tile not found
        500:
            description: Internal Server Error
    """
    try:
        # Raw protobuf tile: bypass flask-restful's JSON serialization by
        # building the response object ourselves.
        pbf = PredictionTileService.mvt(model_id, prediction_id, z, x, y)
        resp = make_response(pbf)
        resp.headers['content-type'] = 'application/x-protobuf'
        return resp
    except PredictionsNotFound:
        return {"error": "Prediction tile not found"}, 404
    except Exception as e:
        message = f'Unhandled error: {str(e)}'
        current_app.logger.error(message)
        return {"error": message}, 500
def post(self, model_id: int):
    """
    Aggregate ml predictions for polygons in the supplied GeoJSON
    ---
    produces:
        - application/json
    parameters:
        - in: body
          name: body
          required: true
          type: string
          description: GeoJSON FeatureCollection of Polygons
    responses:
        200:
            description: GeoJSON FeatureCollection with prediction data in properties
        404:
            description: Model not found
        400:
            description: Invalid Request
        500:
            description: Internal Server Error
    """
    try:
        payload = request.get_json()
        # Reject anything that fails GeoJSON validation outright.
        # (Deliberately `is False`: the validator's other return values
        # are allowed through, matching long-standing behavior.)
        if validate_geojson(payload) is False:
            raise InvalidGeojson

        # 404s via NotFound if the model does not exist.
        model = MLModelService.get_ml_model_by_id(model_id)

        # Aggregate tile predictions over the bbox of the supplied shapes.
        bounds = geojson_bounds(payload)
        feature_collection = PredictionTileService.get_aggregated_tiles_geojson(
            model.model_id, bounds, payload)
        return feature_collection, 200

    except InvalidGeojson:
        return {"error": "Invalid GeoJSON"}, 400
    except NotFound:
        return {"error": "Model not found"}, 404
    except PredictionsNotFound:
        return {"error": "No predictions for this bbox"}, 404
    except Exception as e:
        error_msg = f'Unhandled error: {str(e)}'
        current_app.logger.error(error_msg)
        return {"error": error_msg}, 500
def post(self, prediction_id):
    """
    Submit tile level predictions
    ---
    produces:
        - application/json
    parameters:
        - in: body
          name: body
          required: true
          type: string
          description: JSON object of predictions
          schema:
            properties:
                predictionId:
                    type: integer
                    description: Prediction ID
                    required: true
                predictions:
                    type: array
                    items:
                        type: object
                        schema:
                            properties:
                                quadkey:
                                    type: string
                                    description: quadkey of the tile
                                    required: true
                                centroid:
                                    type: array
                                    items:
                                        type: float
                                    required: true
                                predictions:
                                    type: object
                                    schema:
                                        properties:
                                            ml_prediction:
                                                type: float
    responses:
        200:
            description: ID of the prediction
        400:
            description: Invalid Request
        500:
            description: Internal Server Error
    """
    try:
        data = request.get_json()
        # BUG FIX: a missing body or absent "predictions" key used to raise
        # TypeError/KeyError and surface as a 500; treat it as the
        # documented 400 validation failure instead. An empty predictions
        # list is rejected as before.
        if not data or not data.get('predictions'):
            return err(400, "Error validating request"), 400

        PredictionTileService.create(data)
        return {"prediction_id": prediction_id}, 200
    except PredictionsNotFound:
        return err(404, "Prediction not found"), 404
    except Exception as e:
        error_msg = f'Unhandled error: {str(e)}'
        current_app.logger.error(error_msg)
        return err(500, error_msg), 500