Example #1
import os

from shapely.geometry import Polygon

from core.predict import Predictor


def test_predict():
    weights_path = os.path.join(os.getcwd(), "model", "mask_rcnn_osm_0030.h5")
    assert os.path.isfile(weights_path)

    img_path = os.path.join(os.getcwd(), "test", "data", "18_139423_171197.tiff")
    predictor = Predictor(weights_path)
    polygon_points = predictor.predict_path(img_path=img_path)
    for points in polygon_points:
        polygon = Polygon(points)
        print(polygon.wkt)
    # Smoke test: reaching this point without raising is the pass condition.
    assert 1 == 1
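# --- Hedged usage note (not part of the original snippet): assuming the weights file
# sits in ./model and the sample tile in ./test/data, the test can be run from the
# project root with pytest, e.g. `pytest -s` to keep the printed WKT output visible.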
Example #2
import math
from rest_framework.decorators import api_view
from rest_framework.parsers import JSONParser
from django.http import JsonResponse
from .serializers import InferenceRequestSerializer, InferenceRequest
from core.utils import get_contour, georeference, rectangularize
from core.predict import Predictor
import base64
import numpy as np
from PIL import Image
import io
from shapely import geometry
import traceback
from pycocotools import mask as cocomask

_predictor = Predictor(r"D:\_mapping-challenge\stage2_0.833.h5")
"""
Request format (url: localhost:8000/inference):
{
    "bbox": {
        "lat_min": 12,
        "lat_max": 12,
        "lon_min": 12,
        "lon_max": 12
    },
    "image_data": "123"
}
"""


@api_view(['GET', 'POST'])
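# --- Hedged sketch (not part of the original snippet): a minimal body for the decorator
# above, assuming InferenceRequestSerializer exposes the documented "bbox"/"image_data"
# fields and that Predictor only offers the predict_path() call shown in Example #1.
def inference(request):
    import tempfile  # not among the original imports of this snippet

    try:
        serializer = InferenceRequestSerializer(data=request.data)
        if not serializer.is_valid():
            return JsonResponse(serializer.errors, status=400)

        # Decode the base64-encoded tile and hand it to the model via a temporary file.
        image = Image.open(io.BytesIO(
            base64.b64decode(serializer.validated_data["image_data"])))
        tmp = tempfile.NamedTemporaryFile(suffix=".png", delete=False)
        tmp.close()
        image.save(tmp.name)
        polygon_points = _predictor.predict_path(img_path=tmp.name)

        # Return the predicted building outlines as WKT strings.
        polygons = [geometry.Polygon(points).wkt for points in polygon_points]
        return JsonResponse({"polygons": polygons})
    except Exception:
        traceback.print_exc()
        return JsonResponse({"error": "inference failed"}, status=500)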
Example #3
import uvicorn
from fastapi import FastAPI
from time import perf_counter

from core.predict import Predictor

app = FastAPI()
predictor = Predictor("data")


@app.post("/predict/{sentence}")
def predict(sentence: str):
    # Run a single inference and report how long it took.
    start = perf_counter()
    tokens, tags = predictor(sentence)
    end = perf_counter()
    return {
        "tokens": tokens,
        "tags": tags,
        "time_seconds": f"{end - start:.5f}"
    }


if __name__ == "__main__":
    uvicorn.run(app, port=8000)
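# --- Hedged usage sketch (not part of the original snippet): calling the endpoint above
# once the server is running on localhost:8000. The sentence travels as a URL path
# parameter, so it is URL-encoded here; the example sentence is only illustrative.
import urllib.parse

import requests

sentence = urllib.parse.quote("The quick brown fox jumps over the lazy dog", safe="")
response = requests.post(f"http://localhost:8000/predict/{sentence}")
print(response.json())  # {"tokens": [...], "tags": [...], "time_seconds": "..."}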
Example #4
from django.http import JsonResponse
from rest_framework.decorators import api_view
from .serializers import InferenceRequestSerializer, InferenceRequest
from core.settings import IMAGE_WIDTH
from core.predict import Predictor
import os
import base64
import numpy as np
from PIL import Image
from pygeotile.tile import Tile, Point
import io
import tempfile
from shapely import geometry
import json
import traceback

_predictor = Predictor(
    os.path.join(os.getcwd(), "model", "mask_rcnn_osm_0076.h5"))
"""
Request format (url: localhost:8000/inference):
{
    "bbox": {
        "lat_min": 12,
        "lat_max": 12,
        "lon_min": 12,
        "lon_max": 12
    },
    "image_data": "123"
}
"""


@api_view(['GET', 'POST'])
Example #5
import os
import numpy as np
from PIL import Image
import io
from shapely import geometry, wkt
import geojson
import traceback
import glob

from core.predict import Predictor

model_path = r"D:\_models\mask_rcnn_osm_0100.h5"
if not os.path.isfile(model_path):
    models = glob.glob(os.path.join("/model", "**/*.h5"), recursive=True)
    if not models:
        raise RuntimeError("No models were found in the '/model' folder")
    else:
        model_path = models[0]
_predictor = Predictor(model_path)


"""
Request format (url: localhost:8000/inference):
{
    "bbox": {
        "lat_min": 12,
        "lat_max": 12,
        "lon_min": 12,
        "lon_max": 12
    },
    "image_data": "123"
}
"""