Example 1
# imports assumed by this snippet (the geo_utils import path follows Example 3)
import numpy as np
import rasterio
from rasterio.coords import BoundingBox
from rasterio.warp import transform_bounds

from geo_utils import GeoUtils


def nodata_stats(in_name, bounds):
    src = rasterio.open(in_name)
    nodata = src.nodata

    src_ext = GeoUtils.BoundingBox_to_extent(src.bounds)
    bounds_ext = GeoUtils.BoundingBox_to_extent(
        BoundingBox(*transform_bounds("EPSG:4326", src.crs, *bounds)))

    if not GeoUtils.extents_intersects(src_ext, bounds_ext):
        return 1

    # bounds in the src crs projection
    # (left, bottom, right, top)
    # double check that bounds belong to image
    transformed_bounds = GeoUtils.extent_to_BoundingBox(
        GeoUtils.extent_intersection(src_ext, bounds_ext))

    # calculate window to read from the input tiff
    actual_window = GeoUtils.bounds_to_windows(transformed_bounds, src)

    bands = src.read(window=actual_window)

    # average count of nodata pixels per band within the requested window
    return sum([np.count_nonzero(band == nodata)
                for band in bands]) / src.count
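A minimal usage sketch of the function above; the GeoTIFF path and the WGS84 bounds are made-up values, not from the source:

# hypothetical inputs: a local scene and (left, bottom, right, top) in EPSG:4326
avg_nodata = nodata_stats("scene.tif", (36.89, 0.28, 36.91, 0.30))
print(avg_nodata)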
Example 2
max_shadows = float(imagery_config['max_shadows'])
max_nodata = float(imagery_config['max_nodata'])
output_path = imagery_config['catalog_path']

# logger
# logger = logging.getLogger(__name__)
# logger.setLevel(logging.INFO)
# logging.basicConfig(format = '%(message)s', datefmt = '%m-%d %H:%M')

# pclient init
pclient = PClientV1(api_key, config)

# # build a valid dt string from a month number
# def dt_construct(month, day = 1, year = 2018, t = "00:00:00.000Z"):
#     return "{}-{:02d}-{:02d}T{}".format(year, month, day, t)
aoi = GeoUtils.define_aoi(x, y, cellSize)  # aoi by a cell grid x, y
# print(aoi)

geom = {}
scene_id = ''
output_file = ''
output_localfile = ''
# tms_uri = ''

# start_date = dt_construct(month = mo_start, day = day_start, year = yr_start)
# end_date = dt_construct(month = mo_end, day = day_end, year = yr_end)
# step = datetime.timedelta(days=1)
start = datetime.datetime(yr_start, mo_start, day_start, 0, 0, 0)
# start2 = datetime.datetime(yr_start, mo_start, day_start, 23, 59, 59)
end = datetime.datetime(yr_end, mo_end, day_end, 0, 0, 0, 0)
# step = datetime.timedelta(days=1)
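For reference, the dt_construct helper commented out above builds the timestamp strings used by the Planet filters; a quick sketch of its behaviour:

def dt_construct(month, day=1, year=2018, t="00:00:00.000Z"):
    # build a valid dt string from a month number
    return "{}-{:02d}-{:02d}T{}".format(year, month, day, t)

print(dt_construct(month=3))  # -> "2018-03-01T00:00:00.000Z"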
Example 3
import sys

import waymap
import googlemaps

import pandas as pd

sys.path.append("../route")
sys.path.append("../utils")
sys.path.append("../config")

from geo_utils import GeoUtils
from pymongo import MongoClient
from configuration import Configuration
from polyline_route import PolylineRoute

""" Creating required instances."""
utils = GeoUtils()
config = Configuration()
gmaps = googlemaps.Client(key=config.API_KEY)
mongo_client = MongoClient(host=config.MONGO_HOST, port=config.MONGO_PORT)

""" User input."""
source = sys.argv[1]
destination = sys.argv[2]
mode = sys.argv[3]
_id = "{}_{}_{}".format(source.lower(), destination.lower(), mode)

""" Creating mongo connection."""
db = mongo_client[config.MONGO_TBT_DB]
coll = db[config.MONGO_ROUTE_COLLECTION]

if not coll.find_one({"_id":_id}):
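    # The body of this `if` is truncated in this example. A hedged sketch of what it
    # plausibly does, given that Example 8 continues by inserting `packet` into the
    # collection and later reads packet["legs"]; the Directions API call and the
    # packet layout below are assumptions, not from the source:
    directions = gmaps.directions(source, destination, mode=mode)
    packet = {"_id": _id, "legs": directions[0]["legs"]}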
Example 4
def main_csv():
    master_grid = rasterio.open(master_grid_path)
    # (xsize, ysize)
    cellSize = [s * cellgrid_buffer for s in master_grid.res]

    # sequence of cellgrids to walkthrough
    cell_grids = []

    # build a spatial index (R-tree)
    idx = index.Index()
    reader = csv.reader(open(csv_points))
    next(reader, None)  # skip headers
    for line in reader:
        (cell_id, x, y, cell_name) = line
        typed_line = (int(cell_id), float(x), float(y), cell_name)
        extent = GeoUtils.define_extent(float(x), float(y), cellSize)
        idx.insert(
            int(cell_id),
            (extent['xmin'], extent['ymin'], extent['xmax'], extent['ymax']),
            obj=typed_line)
        cell_grids.append(typed_line)

    actual_extent = GeoUtils.BoundingBox_to_extent(master_grid.bounds)

    # split bands into separate numpy arrays
    # 1, 2, 3, 4, 5, 6, 7
    cell_id_band, country_code_band, country_id_band, ds_s_band, ds_e_band, ws_s_band, ws_e_band = master_grid.read(
    )

    # valid cell_grids / already seen and valid ones
    valid_cell_grids = {
        GS: np.full(cell_id_band.shape, False),
        OS: np.full(cell_id_band.shape, False)
    }

    # output CSV file
    fp = codecs.open(
        filename=pclient.output_filename,
        mode=csv_mode,
        encoding=pclient.output_encoding)  # buffering = 20*(1024**2)
    if csv_only:
        fp = codecs.open(filename=pclient.output_filename_csv,
                         mode=csv_mode,
                         encoding=pclient.output_encoding)

    writer = csv.writer(fp)
    writer.writerow(csv_header)

    for line in cell_grids:
        (cell_id, x, y, cell_name) = line
        r, c = master_grid.index(x, y)

        # check if we have already processed this cell_id
        if not valid_cell_grids[GS][r, c]:
            if psqlclient.exists_row(cell_id=cell_id, season=GS):
                valid_cell_grids[GS][r, c] = True

        if not valid_cell_grids[OS][r, c]:
            if psqlclient.exists_row(cell_id=cell_id, season=OS):
                valid_cell_grids[OS][r, c] = True

        skip_gs = valid_cell_grids[GS][r, c]
        skip_os = valid_cell_grids[OS][r, c]
        skip_row = skip_gs and skip_os

        if not skip_row:
            country_code = country_code_band[r, c]
            country_id = country_id_band[r, c]
            ds_start, ds_end = ds_s_band[r, c], ds_e_band[r, c]
            ws_start, ws_end = ws_s_band[r, c], ws_e_band[r, c]
            seasons = [(GS, ws_start, ws_end),
                       (OS, ds_start, ds_end)]  # dates ranges for the loop
            valid_dates = (0 <= ds_start <= 12 and 0 <= ds_end <= 12
                           and 0 <= ws_start <= 12 and 0 <= ws_end <= 12)

            if not valid_dates:
                logger.info(
                    "cell_id {} is not valid (bad dates data, check the AOI you are querying for)"
                    .format(cell_id))
            else:
                # GS and OS images should be of the same year
                current_year_start = datetime.today().year
                current_year_end = current_year_start

                logger.info("Processing cell_id {}...".format(cell_id))

                aoi = GeoUtils.define_aoi(x, y,
                                          cellSize)  # aoi by a cell grid x, y

                psql_rows = []
                for (season_type, m_start, m_end) in seasons:
                    if not valid_cell_grids[season_type][r, c]:
                        logger.info(
                            "Processing season {}...".format(season_type))

                        geom = {}
                        scene_id = ''
                        output_file = ''
                        output_localfile = ''
                        tms_uri = ''

                        # planet analytic_sr stores imagery starting from 2016
                        years = list(range(2016, current_year_start + 1))
                        years.reverse()

                        for yr in years:
                            # a season spanning the year boundary starts in the previous year
                            if m_start <= m_end:
                                yr_start = yr
                                yr_end = yr
                            else:
                                yr_start = yr - 1
                                yr_end = yr

                            planet_filters = pclient.set_filters_sr(
                                aoi,
                                start_date=dt_construct(month=m_start,
                                                        year=yr_start),
                                end_date=dt_construct(month=m_end,
                                                      year=yr_end))
                            res = pclient.request_intersecting_scenes(
                                planet_filters)

                            # pick up scene id and its geometry
                            for item in res.items_iter(pclient.maximgs):
                                # each item is a GeoJSON feature
                                geom = shape(
                                    geojson.loads(json.dumps(
                                        item["geometry"])))
                                scene_id = item["id"]
                                # cleanup local catalog to remove previous iterations files
                                pclient.cleanup_catalog()
                                # activation & download
                                # it should be sync, to allow async check of neighbours
                                output_localfile, output_file = pclient.download_localfs_s3(
                                    scene_id, season=season_type)

                                bbox_local = GeoUtils.define_BoundingBox(
                                    x, y, cellSize)
                                # nodata percentage
                                nodata_perc = nodata_stats_wraped(
                                    output_localfile, bbox_local)
                                # use custom cloud detection function to calculate clouds and shadows
                                cloud_perc, shadow_perc = cloud_shadow_stats_config_wraped(
                                    output_localfile, bbox_local, cloud_config)
                                # check if cell grid is good enough
                                if (cloud_perc <= pclient.max_clouds
                                        and shadow_perc <= pclient.max_shadows
                                        and nodata_perc <= pclient.max_nodata):
                                    break
                                else:
                                    scene_id = ''

                            # record success year
                            if (scene_id != ''):
                                current_year_start = yr_start
                                current_year_end = yr_end
                                break

                        # mark the current cell grid as already seen
                        if (scene_id != ''):
                            valid_cell_grids[season_type][r, c] = True

                            tms_uri = rfclient.create_tms_uri(
                                scene_id, output_file)
                            base_row = [
                                cell_id, scene_id, c, r, season_type,
                                output_file, tms_uri
                            ]
                            writer.writerow(base_row)
                            psql_rows.append(
                                ('planet', scene_id, str(cell_id), season_type,
                                 str(c), str(r), output_file, tms_uri))

                            # logger.debug(base_row)
                            # extent of a polygon to query neighbours
                            # (minx, miny, maxx, maxy)
                            # geom.bounds
                            base_ext = GeoUtils.extent_intersection(
                                actual_extent,
                                GeoUtils.polygon_to_extent(geom))

                            def sync(sub_row):
                                sub_cell_id, sx, sy, sub_name = sub_row.object

                                # global row/col of this neighbour in the master grid
                                sr, sc = master_grid.index(sx, sy)

                                # polygon used to check whether it intersects the initial AOI
                                sub_poly = GeoUtils.define_polygon(
                                    sx, sy, cellSize)

                                skip_sub_row = valid_cell_grids[season_type][
                                    sr, sc]

                                if not skip_sub_row:
                                    # read all metadata
                                    sub_country_code, sub_country_id = country_code_band[
                                        sr, sc], country_id_band[sr, sc]
                                    sub_ds_start, sub_ds_end = ds_s_band[
                                        sr, sc], ds_e_band[sr, sc]
                                    sub_ws_start, sub_ws_end = ws_s_band[
                                        sr, sc], ws_e_band[sr, sc]
                                    sub_seasons = [
                                        (GS, sub_ws_start, sub_ws_end),
                                        (OS, sub_ds_start, sub_ds_end)
                                    ]  # dates ranges for the loop

                                    # neighbours should be in the same period, otherwise we'll try to fetch them later
                                    if (seasons == sub_seasons):
                                        logger.info(
                                            "Processing sub cell_id {}...".
                                            format(sub_cell_id))

                                        sub_aoi = GeoUtils.define_aoi(
                                            sx, sy, cellSize
                                        )  # aoi by a cell grid x, y

                                        # query the Planet API and check whether this cell grid has good enough cloud coverage
                                        sub_planet_filters = pclient.set_filters_sr(
                                            sub_aoi,
                                            start_date=dt_construct(
                                                month=m_start,
                                                year=current_year_start),
                                            end_date=dt_construct(
                                                month=m_end,
                                                year=current_year_end),
                                            id=scene_id)
                                        res = pclient.request_intersecting_scenes(
                                            sub_planet_filters)

                                        bbox_local = GeoUtils.define_BoundingBox(
                                            sx, sy, cellSize)
                                        # nodata percentage
                                        sub_nodata_perc = nodata_stats_wraped(
                                            output_localfile, bbox_local)
                                        # use custom cloud detection function to calculate clouds and shadows
                                        sub_cloud_perc, sub_shadow_perc = cloud_shadow_stats_config_wraped(
                                            output_localfile, bbox_local,
                                            cloud_config)
                                        # check if cell grid is good enough
                                        if (sub_cloud_perc <=
                                                pclient.max_clouds
                                                and sub_shadow_perc <=
                                                pclient.max_shadows
                                                and sub_nodata_perc <=
                                                pclient.max_nodata):
                                            # flag to avoid extra lookup into array
                                            sub_valid = False
                                            # take the single returned image
                                            for item in res.items_iter(1):
                                                valid_cell_grids[season_type][
                                                    sr, sc] = True
                                                sub_valid = True

                                            if sub_valid:
                                                sub_global_row, sub_global_col = master_grid.index(
                                                    sx, sy)
                                                base_sub_row = [
                                                    sub_cell_id, scene_id,
                                                    sub_global_col,
                                                    sub_global_row,
                                                    season_type, output_file,
                                                    tms_uri
                                                ]
                                                writer.writerow(base_sub_row)
                                                psql_rows.append(
                                                    ('planet', scene_id,
                                                     str(sub_cell_id),
                                                     season_type,
                                                     str(sub_global_col),
                                                     str(sub_global_row),
                                                     output_file, tms_uri))

                            # query cellgrid neighbours
                            # and walk through all cellgrid neighbours
                            for sub_row in idx.intersection(
                                (base_ext['xmin'], base_ext['ymin'],
                                 base_ext['xmax'], base_ext['ymax']),
                                    objects=True):
                                neighbours_executor.submit(sync, sub_row)

                            # await all neighbours
                            neighbours_executor.drain()
                            # await all downloads
                            pclient.drain()
                            # insert everything into psql
                            psqlclient.insert_rows_by_one_async(psql_rows)
                            # cleanup local catalog
                            pclient.cleanup_catalog()
                            # refresh RF token
                            rfclient.refresh()

    # await threadpool to stop
    neighbours_executor.close()
    fp.close()
    if csv_only:
        pclient.upload_s3_csv_csv()
        pclient.close()

    psqlclient.close()
    print("-------------------")
    print("CSV DONE")
    print("-------------------")
Example 5
                                def sync(sr, sc):
                                    # sub centroid
                                    sx, sy = transform.xy(
                                        actual_transform, sr, sc)

                                    # polygon used to check whether it intersects the initial AOI
                                    sub_poly = GeoUtils.define_polygon(
                                        sx, sy, cellSize)

                                    # skip sub_row conditions
                                    skip_sub_row = False
                                    if actual_aoi.contains(sub_poly) or test:
                                        skip_sub_row = valid_band[season_type][
                                            sr, sc]
                                    else:
                                        skip_sub_row = True

                                    if not skip_sub_row:
                                        # read all metadata
                                        sub_cell_id = cell_id_band[sr, sc]
                                        sub_country_code, sub_country_id = country_code_band[
                                            sr, sc], country_id_band[sr, sc]
                                        sub_ds_start, sub_ds_end = ds_s_band[
                                            sr, sc], ds_e_band[sr, sc]
                                        sub_ws_start, sub_ws_end = ws_s_band[
                                            sr, sc], ws_e_band[sr, sc]
                                        sub_seasons = [
                                            (GS, sub_ws_start, sub_ws_end),
                                            (OS, sub_ds_start, sub_ds_end)
                                        ]  # dates ranges for the loop

                                        # neighbours should be in the same period, otherwise we'll try to fetch them later
                                        if (seasons == sub_seasons):
                                            logger.info(
                                                "Processing sub cell_id {}...".
                                                format(sub_cell_id))

                                            sub_aoi = GeoUtils.define_aoi(
                                                sx, sy, cellSize
                                            )  # aoi by a cell grid x, y

                                            # query the Planet API and check whether this cell grid has good enough cloud coverage
                                            sub_planet_filters = pclient.set_filters_sr(
                                                sub_aoi,
                                                start_date=dt_construct(
                                                    month=m_start,
                                                    year=current_year_start),
                                                end_date=dt_construct(
                                                    month=m_end,
                                                    year=current_year_end),
                                                id=scene_id)
                                            res = pclient.request_intersecting_scenes(
                                                sub_planet_filters)

                                            sub_bbox_local = GeoUtils.define_BoundingBox(
                                                sx, sy, cellSize)
                                            # nodata percentage
                                            sub_nodata_perc = nodata_stats_wraped(
                                                output_localfile,
                                                sub_bbox_local)
                                            # use custom cloud detection function to calculate clouds and shadows
                                            sub_cloud_perc, sub_shadow_perc = cloud_shadow_stats_config_wraped(
                                                output_localfile,
                                                GeoUtils.define_BoundingBox(
                                                    sx, sy, cellSize),
                                                cloud_config)
                                            # check if cell grid is good enough
                                            if (sub_cloud_perc <=
                                                    pclient.max_clouds
                                                    and sub_shadow_perc <=
                                                    pclient.max_shadows
                                                    and sub_nodata_perc <=
                                                    pclient.max_nodata):
                                                # flag to avoid extra lookup into array
                                                sub_valid = False
                                                # take the single returned image
                                                for item in res.items_iter(1):
                                                    valid_band[season_type][
                                                        sr, sc] = True
                                                    sub_valid = True

                                                if sub_valid:
                                                    sub_global_row, sub_global_col = master_grid.index(
                                                        sx, sy)
                                                    base_sub_row = [
                                                        sub_cell_id, scene_id,
                                                        sub_global_col,
                                                        sub_global_row,
                                                        season_type,
                                                        output_file, tms_uri
                                                    ]
                                                    writer.writerow(
                                                        base_sub_row)
                                                    psql_rows.append(
                                                        ('planet', scene_id,
                                                         str(sub_cell_id),
                                                         season_type,
                                                         str(sub_global_col),
                                                         str(sub_global_row),
                                                         output_file, tms_uri))
Example 6
def main_json():
    ext = GeoUtils.polygon_to_extent(actual_aoi)

    if test:
        # ext = GeoUtils.define_extent(30, -2, (0.03, 0.03)) # some test AOI to select a subset of extent from the master_grid.tiff
        ext = GeoUtils.define_extent(36.897016, 0.292392,
                                     (0.01, 0.01))  #mpala test
        # ext = {
        #     "xmin": 27.03,
        #     "ymin": -25.97,
        #     "xmax": 27.08,
        #     "ymax": -25.99
        # }

    # 1. Cell ID: this should be unique for the whole raster
    # 2. Country code: integerized country code
    # 3. Country ID: unique cell number within each country
    # 4. dry season start month
    # 5. dry season end month
    # 6. wet season start month
    # 7. wet season end month
    # 2 and 3 combined link to the unique name field in the current ***REMOVED*** database
    master_grid = rasterio.open(master_grid_path)
    # (xsize, ysize)
    cellSize = [s * cellgrid_buffer for s in master_grid.res]
    rows, cols = master_grid.shape  # all pixels of initial master grid
    bounds = master_grid.bounds
    # left = xmin, bottom = ymin, right = xmax, top = ymax
    actual_bounds = GeoUtils.extent_to_BoundingBox(
        GeoUtils.extent_intersection(GeoUtils.BoundingBox_to_extent(bounds),
                                     ext))
    actual_extent = GeoUtils.BoundingBox_to_extent(actual_bounds)

    # returns row, col
    actual_window = GeoUtils.bounds_to_windows(actual_bounds, master_grid)
    ((start_row, stop_row), (start_col, stop_col)) = actual_window
    actual_window_width, actual_window_height = stop_col - start_col, stop_row - start_row

    # transformation for the AOI
    actual_transform = transform.from_bounds(
        actual_bounds.left, actual_bounds.bottom, actual_bounds.right,
        actual_bounds.top, actual_window_width, actual_window_height)

    # split bands into separate numpy arrays
    # 1, 2, 3, 4, 5, 6, 7
    cell_id_band, country_code_band, country_id_band, ds_s_band, ds_e_band, ws_s_band, ws_e_band = master_grid.read(
        window=actual_window)

    # extra band with information about already seen cells
    valid_band = {
        GS: np.full(cell_id_band.shape, False),
        OS: np.full(cell_id_band.shape, False)
    }

    # output CSV file
    fp = codecs.open(
        filename=pclient.output_filename,
        mode=csv_mode,
        encoding=pclient.output_encoding)  # buffering = 20*(1024**2)
    writer = csv.writer(fp)
    if not (with_csv and csv_only):
        writer.writerow(csv_header)

    # logger.info(range(actual_window_height))
    # logger.info(range(actual_window_width))

    for r in range(actual_window_height):
        for c in range(actual_window_width):
            cell_id = cell_id_band[r, c]

            # cell grid centroid
            x, y = transform.xy(actual_transform, r, c)

            # polygon used to check whether it intersects the initial AOI
            poly = GeoUtils.define_polygon(x, y, cellSize)

            # skip row conditions
            skip_row = False
            if actual_aoi.contains(poly) or test:
                # check if we have already processed this cell_id
                if not valid_band[GS][r, c]:
                    if psqlclient.exists_row(cell_id=cell_id, season=GS):
                        valid_band[GS][r, c] = True

                if not valid_band[OS][r, c]:
                    if psqlclient.exists_row(cell_id=cell_id, season=OS):
                        valid_band[OS][r, c] = True

                skip_gs, skip_os = valid_band[GS][r, c], valid_band[OS][r, c]

                skip_row = skip_gs and skip_os
            else:
                skip_row = True

            if not skip_row:
                # read all metadata
                country_code, country_id = country_code_band[
                    r, c], country_id_band[r, c]
                ds_start, ds_end = ds_s_band[r, c], ds_e_band[r, c]
                ws_start, ws_end = ws_s_band[r, c], ws_e_band[r, c]
                seasons = [(GS, ws_start, ws_end),
                           (OS, ds_start, ds_end)]  # dates ranges for the loop
                # check that the dates are valid month numbers (the only sanity check available here)
                valid_dates = (0 <= ds_start <= 12 and 0 <= ds_end <= 12
                               and 0 <= ws_start <= 12 and 0 <= ws_end <= 12)

                if not valid_dates:
                    logger.info(
                        "cell_id {} is not valid (bad dates data, check the AOI you are querying for)"
                        .format(cell_id))
                else:
                    # GS and OS images should be of the same year
                    current_year_start = datetime.today().year
                    current_year_end = current_year_start

                    logger.info("Processing cell_id {}...".format(cell_id))

                    aoi = GeoUtils.define_aoi(
                        x, y, cellSize)  # aoi by a cell grid x, y

                    psql_rows = []
                    for (season_type, m_start, m_end) in seasons:
                        if not valid_band[season_type][r, c]:
                            logger.info(
                                "Processing season {}...".format(season_type))

                            geom = {}
                            scene_id = ''
                            output_file = ''
                            output_localfile = ''
                            tms_uri = ''

                            # planet analytic_sr stores imagery starting from 2016
                            years = list(range(2016, current_year_start + 1))
                            years.reverse()

                            for yr in years:
                                # a season spanning the year boundary starts in the previous year
                                if m_start <= m_end:
                                    yr_start = yr
                                    yr_end = yr
                                else:
                                    yr_start = yr - 1
                                    yr_end = yr

                                planet_filters = pclient.set_filters_sr(
                                    aoi,
                                    start_date=dt_construct(month=m_start,
                                                            year=yr_start),
                                    end_date=dt_construct(month=m_end,
                                                          year=yr_end))
                                res = pclient.request_intersecting_scenes(
                                    planet_filters)

                                # pick up scene id and its geometry
                                for item in res.items_iter(pclient.maximgs):
                                    # each item is a GeoJSON feature
                                    geom = shape(
                                        geojson.loads(
                                            json.dumps(item["geometry"])))
                                    scene_id = item["id"]
                                    # cleanup local catalog to remove previous iterations files
                                    pclient.cleanup_catalog()
                                    # activation & download
                                    # it should be sync, to allow async check of neighbours
                                    output_localfile, output_file = pclient.download_localfs_s3(
                                        scene_id, season=season_type)

                                    bbox_local = GeoUtils.define_BoundingBox(
                                        x, y, cellSize)
                                    # nodata percentage
                                    nodata_perc = nodata_stats_wraped(
                                        output_localfile, bbox_local)
                                    # use custom cloud detection function to calculate clouds and shadows
                                    cloud_perc, shadow_perc = cloud_shadow_stats_config_wraped(
                                        output_localfile, bbox_local,
                                        cloud_config)
                                    # check if cell grid is good enough
                                    if (cloud_perc <= pclient.max_clouds and
                                            shadow_perc <= pclient.max_shadows
                                            and
                                            nodata_perc <= pclient.max_nodata):
                                        break
                                    else:
                                        scene_id = ''

                                # record success year
                                if (scene_id != ''):
                                    current_year_start = yr_start
                                    current_year_end = yr_end
                                    break

                            # mark the current cell grid as already seen
                            if (scene_id != ''):
                                valid_band[season_type][r, c] = True
                                tms_uri = rfclient.create_tms_uri(
                                    scene_id, output_file)
                                global_row, global_col = master_grid.index(
                                    x, y)
                                base_row = [
                                    cell_id, scene_id, global_col, global_row,
                                    season_type, output_file, tms_uri
                                ]
                                writer.writerow(base_row)
                                psql_rows.append(
                                    ('planet', scene_id, str(cell_id),
                                     season_type, str(global_col),
                                     str(global_row), output_file, tms_uri))

                                # logger.debug(base_row)
                                # extent of a polygon to query neighbours
                                # (minx, miny, maxx, maxy)
                                # geom.bounds
                                base_ext = GeoUtils.extent_intersection(
                                    actual_extent,
                                    GeoUtils.polygon_to_extent(geom))

                                # walk through all cellgrid neighbours
                                # get all row/col ranges intersecting this extent
                                ((sub_start_row, sub_stop_row),
                                 (sub_start_col,
                                  sub_stop_col)) = GeoUtils.extent_to_windows(
                                      base_ext, actual_transform)

                                # logger.info(range(sub_start_row, sub_stop_row))
                                # logger.info(range(sub_start_col, sub_stop_col))

                                def sync(sr, sc):
                                    # sub centroid
                                    sx, sy = transform.xy(
                                        actual_transform, sr, sc)

                                    # polygon used to check whether it intersects the initial AOI
                                    sub_poly = GeoUtils.define_polygon(
                                        sx, sy, cellSize)

                                    # skip sub_row conditions
                                    skip_sub_row = False
                                    if actual_aoi.contains(sub_poly) or test:
                                        skip_sub_row = valid_band[season_type][
                                            sr, sc]
                                    else:
                                        skip_sub_row = True

                                    if not skip_sub_row:
                                        # read all metadata
                                        sub_cell_id = cell_id_band[sr, sc]
                                        sub_country_code, sub_country_id = country_code_band[
                                            sr, sc], country_id_band[sr, sc]
                                        sub_ds_start, sub_ds_end = ds_s_band[
                                            sr, sc], ds_e_band[sr, sc]
                                        sub_ws_start, sub_ws_end = ws_s_band[
                                            sr, sc], ws_e_band[sr, sc]
                                        sub_seasons = [
                                            (GS, sub_ws_start, sub_ws_end),
                                            (OS, sub_ds_start, sub_ds_end)
                                        ]  # dates ranges for the loop

                                        # neighbours should be in the same period, otherwise we'll try to fetch them later
                                        if (seasons == sub_seasons):
                                            logger.info(
                                                "Processing sub cell_id {}...".
                                                format(sub_cell_id))

                                            sub_aoi = GeoUtils.define_aoi(
                                                sx, sy, cellSize
                                            )  # aoi by a cell grid x, y

                                            # query the Planet API and check whether this cell grid has good enough cloud coverage
                                            sub_planet_filters = pclient.set_filters_sr(
                                                sub_aoi,
                                                start_date=dt_construct(
                                                    month=m_start,
                                                    year=current_year_start),
                                                end_date=dt_construct(
                                                    month=m_end,
                                                    year=current_year_end),
                                                id=scene_id)
                                            res = pclient.request_intersecting_scenes(
                                                sub_planet_filters)

                                            sub_bbox_local = GeoUtils.define_BoundingBox(
                                                sx, sy, cellSize)
                                            # nodata percentage
                                            sub_nodata_perc = nodata_stats_wraped(
                                                output_localfile,
                                                sub_bbox_local)
                                            # use custom cloud detection function to calculate clouds and shadows
                                            sub_cloud_perc, sub_shadow_perc = cloud_shadow_stats_config_wraped(
                                                output_localfile,
                                                GeoUtils.define_BoundingBox(
                                                    sx, sy, cellSize),
                                                cloud_config)
                                            # check if cell grid is good enough
                                            if (sub_cloud_perc <=
                                                    pclient.max_clouds
                                                    and sub_shadow_perc <=
                                                    pclient.max_shadows
                                                    and sub_nodata_perc <=
                                                    pclient.max_nodata):
                                                # flag to avoid extra lookup into array
                                                sub_valid = False
                                                # take the single returned image
                                                for item in res.items_iter(1):
                                                    valid_band[season_type][
                                                        sr, sc] = True
                                                    sub_valid = True

                                                if sub_valid:
                                                    sub_global_row, sub_global_col = master_grid.index(
                                                        sx, sy)
                                                    base_sub_row = [
                                                        sub_cell_id, scene_id,
                                                        sub_global_col,
                                                        sub_global_row,
                                                        season_type,
                                                        output_file, tms_uri
                                                    ]
                                                    writer.writerow(
                                                        base_sub_row)
                                                    psql_rows.append(
                                                        ('planet', scene_id,
                                                         str(sub_cell_id),
                                                         season_type,
                                                         str(sub_global_col),
                                                         str(sub_global_row),
                                                         output_file, tms_uri))

                                for sr in range(sub_start_row, sub_stop_row):
                                    for sc in range(sub_start_col,
                                                    sub_stop_col):
                                        neighbours_executor.submit(
                                            sync, sr, sc)

                                # await all neighbours
                                neighbours_executor.drain()
                                # await all downloads
                                pclient.drain()
                                # insert everything into psql
                                psqlclient.insert_rows_by_one_async(psql_rows)
                                # cleanup local catalog
                                pclient.cleanup_catalog()
                                # refresh RF token
                                rfclient.refresh()

                            # base_row = [cell_id, scene_id, season_type, ""]
                            # writer.writerow(base_row)

    # await threadpool to stop
    neighbours_executor.close()
    fp.close()
    pclient.upload_s3_csv()
    pclient.close()
    psqlclient.close()
    print("-------------------")
    print("Results:")
    print("-------------------")
    print("GS: valid {} / {}".format(np.count_nonzero(valid_band[GS]),
                                     valid_band[GS].size))
    print("OS: valid {} / {}".format(np.count_nonzero(valid_band[OS]),
                                     valid_band[OS].size))
    print("-------------------")
Example 7
# imports assumed by this snippet
import os

from flask import Flask, render_template
from flask_googlemaps import GoogleMaps
from geopy.geocoders import Nominatim

geolocator = Nominatim()

from geo_utils import GeoUtils
from waymap import WayMap

from pymongo import MongoClient

mongo_client = MongoClient(host="localhost", port=27017)

db_name = "personalization"
db = mongo_client[db_name]

#from PyQt5 import QtWidgets

wmap = WayMap()
utils = GeoUtils()

# start mongo if not started
try:
    os.system("sudo service mongod restart")
except Exception:
    pass

app = Flask(__name__)
app.config['GOOGLEMAPS_KEY'] = "AIzaSyCMhFUOGH9jLY44y1edzxBLKlmoBOlp_GY"
GoogleMaps(app, key="AIzaSyCMhFUOGH9jLY44y1edzxBLKlmoBOlp_GY")


@app.route("/")
def main():
    return render_template('main.html')
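The snippet stops at the route definition; a conventional entry point for running this Flask app would look like the following (host and port are assumptions, not from the source):

if __name__ == "__main__":
    # development server only; host/port are placeholder values
    app.run(host="0.0.0.0", port=5000, debug=True)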
Example 8
    coll.insert_one(packet)

packet = coll.find_one({"_id": _id})
direction_legs = packet["legs"]

PR = PolylineRoute()
lat, lng = PR.polyline_route(direction_legs)

df = pd.DataFrame()
df["latitude"], df["longitude"] = lat, lng

df = pd.concat((df, df[["latitude", "longitude"]].shift().\
    rename(columns={"latitude":"latitude1", "longitude":"longitude1"})),axis=1)
df.loc[0, "latitude1"] = df.loc[0, "latitude"]
df.loc[0, "longitude1"] = df.loc[0, "longitude"] 
geo_prop = GeoUtils()

df["distance"] = df.apply(lambda x: geo_prop.haversine_distance((x["latitude1"], x["longitude1"]),\
    (x["latitude"], x["longitude"])), axis=1)
df["bearing_angle"] = df.apply(lambda x: geo_prop.compass((x["latitude1"], x["longitude1"]),\
    (x["latitude"], x["longitude"]))["angles"]["degrees"], axis=1)
df["direction"] = df.apply(lambda x:geo_prop.compass((x["latitude1"], x["longitude1"]),\
    (x["latitude"], x["longitude"]))["directions"]["long"], axis=1)
print(df.head(30))

# use the route's midpoint as the map centre (.loc needs an integer index here)
centre_lat = df.loc[len(df) // 2, "latitude"]
centre_lng = df.loc[len(df) // 2, "longitude"]
wmap = waymap.WayMap(cent_lat=centre_lat, cent_lng=centre_lng)
wmap.plot_route(df, plot_type="scatter", type="HTML", data_for=wmap.html_handling(df, "random"))
wmap.draw("route_map.html")
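GeoUtils.haversine_distance is not shown in these examples; for reference, a standard haversine sketch over (lat, lon) pairs, which is not necessarily the project's implementation:

import math

def haversine_km(p1, p2):
    # great-circle distance in kilometres between two (lat, lon) points
    lat1, lon1, lat2, lon2 = map(math.radians, (*p1, *p2))
    a = (math.sin((lat2 - lat1) / 2) ** 2
         + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2) ** 2)
    return 2 * 6371.0 * math.asin(math.sqrt(a))

print(haversine_km((0.29, 36.90), (0.30, 36.91)))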
Example 10
def cloud_shadow_stats_old(in_name,
                           bounds,
                           cloud_val=1500,
                           shadow_val=2000,
                           land_val=1000):
    """
    Input parameter:
    in_name    - The full path of a Geotiff format image. e.g., r"D:\test_image\planet.tif"
    bounds     - lat lon bounds to read data from
    cloud_val  - The threshold of cloud in the min image (for more about "min image", see #2 below); default = 1500;
    shadow_val - The threshold of shadow in the max image; default = 2000;
    land_val   - The threshold of land in the Near Infrared image (band 4); default = 1000
    Output: cloud_perc, shadow_perc
    The output is a tuple with two float numbers:  
    cloud_perc  - cloud pixels percentage in that image, 
    shadow_perc - shadow percentage in that image.
    """

    src = rasterio.open(in_name)

    # bounds in the src crs projection
    # (left, bottom, right, top)
    # double check that bounds belong to image
    transformed_bounds = GeoUtils.extent_to_BoundingBox(
        GeoUtils.extent_intersection(
            GeoUtils.BoundingBox_to_extent(src.bounds),
            GeoUtils.BoundingBox_to_extent(
                BoundingBox(
                    *transform_bounds("EPSG:4326", src.crs, *bounds)))))

    # calculate window to read from the input tiff
    actual_window = GeoUtils.bounds_to_windows(transformed_bounds, src)

    # 1 open the tif, take 4 bands, and read them as arrays
    b1_array, b2_array, b3_array, b4_array = src.read(window=actual_window)

    # 2. make max image and min image from four input bands.
    # np.dstack() takes a list of bands and makes a band stack
    # np.amax() find the max along the axis, here 2 means the axis that penetrates through bands in each pixel.
    band_list = [b1_array, b2_array, b3_array, b4_array]
    stacked = np.dstack(band_list)
    max_img = np.amax(stacked, 2)
    min_img = np.amin(stacked, 2)

    del b1_array, b2_array, b3_array, band_list

    # 3. make max 7x7 filtered max and min image
    max7x7_img = ndimage.maximum_filter(max_img, 7)
    min7x7_img = ndimage.minimum_filter(min_img, 7)

    del max_img, min_img

    # 4. extract cloud, shadow&water, land
    # The threshold here is based on Sitian and Tammy's test on 11 planet scenes. It may not work well for every AOI.
    # np.where() changes or loses the dtype, so .astype(np.int16) is used to keep the datatype the same as the original
    cloud_array = np.where(min7x7_img > cloud_val, 1, 0).astype(np.int16)
    shadow_and_water_array = np.where(max7x7_img < shadow_val, 1,
                                      0).astype(np.int16)
    land_array = np.where(b4_array > land_val, 1, 0).astype(np.int16)

    del max7x7_img, min7x7_img, b4_array

    # 5. get shadow by masking
    shadow_array = np.where(land_array == 1, shadow_and_water_array,
                            0).astype(np.int16)

    # 6. Calculate Statistics
    grid_count = np.ma.count(shadow_array)  # actually counts all pixels
    cloud_count = np.count_nonzero(cloud_array == 1)
    shadow_count = np.count_nonzero(shadow_array == 1)

    cloud_perc = cloud_count / grid_count
    shadow_perc = shadow_count / grid_count

    del cloud_array, shadow_and_water_array, land_array, shadow_array
    return cloud_perc, shadow_perc
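The "min image" and "max image" mentioned in the docstring above are per-pixel minima/maxima across the four bands; a toy sketch of that step and the 7x7 filtering (random data, cloud threshold as in the defaults above):

import numpy as np
from scipy import ndimage

# four toy 5x5 bands stacked along the last axis
bands = np.dstack([np.random.randint(0, 3000, (5, 5)).astype(np.int16)
                   for _ in range(4)])
min_img = np.amin(bands, 2)                   # per-pixel minimum across bands
min7x7 = ndimage.minimum_filter(min_img, 7)   # 7x7 neighbourhood minimum
cloud_mask = np.where(min7x7 > 1500, 1, 0).astype(np.int16)
print(np.count_nonzero(cloud_mask == 1))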
Example 11
def cloud_shadow_stats(in_name,
                       bounds,
                       cloud_val=1500,
                       object_size_thresh=200,
                       eccentricity_thresh=0.95,
                       peri_to_area_ratio=0.3,
                       shadow_reflectance_thresh=1500,
                       land_reflectance_thresh=1000):
    """
    Input parameter:
    in_name    - The full path of a Geotiff format image. e.g., r"D:\test_image\planet.tif"
    bounds     - lat lon bounds to read data from
    cloud_val  - The threshold of cloud in the min image (for more about "min image", see #2 below); default = 1500
    Output: cloud_perc, shadow_perc
    The output is a tuple with two float numbers:
    cloud_perc  - cloud pixels percentage in that image,
    shadow_perc - shadow percentage in that image.
    """

    src = rasterio.open(in_name)

    src_ext = GeoUtils.BoundingBox_to_extent(src.bounds)
    bounds_ext = GeoUtils.BoundingBox_to_extent(
        BoundingBox(*transform_bounds("EPSG:4326", src.crs, *bounds)))

    if not GeoUtils.extents_intersects(src_ext, bounds_ext):
        return 1, 1

    # bounds in the src crs projection
    # (left, bottom, right, top)
    # double check that bounds belong to image
    transformed_bounds = GeoUtils.extent_to_BoundingBox(
        GeoUtils.extent_intersection(src_ext, bounds_ext))

    # calculate window to read from the input tiff
    actual_window = GeoUtils.bounds_to_windows(transformed_bounds, src)

    # 1 open the tif, take 4 bands, and read them as arrays
    b1_array, b2_array, b3_array, b4_array = src.read(window=actual_window)

    # 2. make max image and min image from four input bands.
    # np.dstack() takes a list of bands and makes a band stack
    # np.amax() find the max along the axis, here 2 means the axis that penetrates through bands in each pixel.
    band_list = [b1_array, b2_array, b3_array, b4_array]
    stacked = np.dstack(band_list)
    min_img = np.amin(stacked, 2)

    del b1_array, b2_array, b3_array, band_list

    # 4. extract cloud, shadow&water, land
    # The threshold here is based on Sitian and Tammy's test on 11 planet scenes. It may not work well for every AOI.
    # np.where() changes or loses the dtype, so .astype(np.int16) is used to keep the datatype the same as the original
    cloud_array_initial = np.where(min_img > cloud_val, 1, 0).astype(np.int16)
    shadow_array_initial = initial_shadow_filter(stacked,
                                                 shadow_reflectance_thresh,
                                                 land_reflectance_thresh)
    del min_img

    cloud_array = cloud_size_shape_filter(cloud_array_initial,
                                          object_size_thresh,
                                          eccentricity_thresh)
    shadow_array = shadow_size_shape_filter(shadow_array_initial,
                                            object_size_thresh,
                                            eccentricity_thresh,
                                            peri_to_area_ratio)
    # 6. Calculate Statistics
    grid_count = np.ma.count(cloud_array)  # actually counts all pixels
    cloud_count = np.count_nonzero(cloud_array == 1)
    shadow_count = np.count_nonzero(shadow_array == 1)
    cloud_perc = cloud_count / grid_count
    shadow_perc = shadow_count / grid_count
    del cloud_array, shadow_array
    return cloud_perc, shadow_perc
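A hedged usage sketch of cloud_shadow_stats; the path and WGS84 bounds are placeholders, and the size/shape filter helpers referenced above are assumed to be defined in the same module:

cloud_perc, shadow_perc = cloud_shadow_stats(
    "scene.tif", (36.89, 0.28, 36.91, 0.30))
print("clouds: {:.2%}, shadows: {:.2%}".format(cloud_perc, shadow_perc))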