Example #1
    def test_ascatb_match(self):
        from shapely import wkt
        from nexustiles.nexustiles import NexusTileService
        # spark_matchup_driver is assumed to be available at module scope,
        # imported from the Spark matchup algorithm module under test.

        polygon = wkt.loads(
            "POLYGON((-34.98 29.54, -30.1 29.54, -30.1 31.00, -34.98 31.00, -34.98 29.54))"
        )
        primary_ds = "ASCATB-L2-Coastal"
        matchup_ds = "spurs"
        parameter = "wind"
        start_time = 1351468800  # 2012-10-29T00:00:00Z
        end_time = 1351555200  # 2012-10-30T00:00:00Z
        time_tolerance = 86400
        depth_tolerance = 5.0
        radius_tolerance = 110000.0  # 110 km
        platforms = "1,2,3,4,5,6,7,8,9"

        tile_service = NexusTileService()
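        # fetch_data=False keeps the tile data arrays from being loaded here;
        # only the tile ids are needed to hand off to the Spark matchup driver.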
        tile_ids = [
            tile.tile_id
            for tile in tile_service.find_tiles_in_polygon(polygon,
                                                           primary_ds,
                                                           start_time,
                                                           end_time,
                                                           fetch_data=False,
                                                           fl='id')
        ]
        result = spark_matchup_driver(tile_ids, wkt.dumps(polygon), primary_ds,
                                      matchup_ds, parameter, time_tolerance,
                                      depth_tolerance, radius_tolerance,
                                      platforms)
        for k, v in result.items():
            print("primary: %s\n\tmatches:\n\t\t%s" % (
                "lon: %s, lat: %s, time: %s, wind u,v: %s,%s" %
                (k.longitude, k.latitude, k.time, k.wind_u, k.wind_v),
                '\n\t\t'.join([
                    "lon: %s, lat: %s, time: %s, wind u,v: %s,%s" %
                    (i.longitude, i.latitude, i.time, i.wind_u, i.wind_v)
                    for i in v
                ])))
Example #2
    def test_smap_match(self):
        from shapely import wkt
        from nexustiles.nexustiles import NexusTileService
        # As in Example #1, spark_matchup_driver is assumed to be available at
        # module scope.

        polygon = wkt.loads(
            "POLYGON((-34.98 29.54, -30.1 29.54, -30.1 31.00, -34.98 31.00, -34.98 29.54))"
        )
        primary_ds = "SMAP_L2B_SSS"
        matchup_ds = "spurs"
        parameter = "sss"
        start_time = 1350259200  # 2012-10-15T00:00:00Z
        end_time = 1350345600  # 2012-10-16T00:00:00Z
        time_tolerance = 86400
        depth_tolerance = 5.0
        radius_tolerance = 1500.0
        platforms = "1,2,3,4,5,6,7,8,9"

        tile_service = NexusTileService()
        tile_ids = [
            tile.tile_id
            for tile in tile_service.find_tiles_in_polygon(polygon,
                                                           primary_ds,
                                                           start_time,
                                                           end_time,
                                                           fetch_data=False,
                                                           fl='id')
        ]
        result = spark_matchup_driver(tile_ids, wkt.dumps(polygon), primary_ds,
                                      matchup_ds, parameter, time_tolerance,
                                      depth_tolerance, radius_tolerance,
                                      platforms)
        for k, v in result.items():
            print("primary: %s\n\tmatches:\n\t\t%s" % (
                "lon: %s, lat: %s, time: %s, sst: %s" %
                (k.longitude, k.latitude, k.time, k.sst), '\n\t\t'.join([
                    "lon: %s, lat: %s, time: %s, sst: %s" %
                    (i.longitude, i.latitude, i.time, i.sst) for i in v
                ])))
Example #3
import calendar
from datetime import datetime

import numpy as np
import pytz
import shapely.geometry
import shapely.wkt
from nexustiles.nexustiles import NexusTileService


def calculate_monthly_average(month=None, bounding_polygon_wkt=None, ds=None):
    EPOCH = pytz.timezone('UTC').localize(datetime(1970, 1, 1))
    tile_service = NexusTileService()
    # get_min_max_date is assumed to be a helper defined in the same module
    # that returns the dataset's time coverage as a (min, max) datetime pair.
    min_date, max_date = get_min_max_date(tile_service, ds=ds)
    monthly_averages, monthly_counts = [], []
    monthly_mins, monthly_maxes = [], []
    bounding_polygon = shapely.wkt.loads(bounding_polygon_wkt)
    for year in range(min_date.year, max_date.year + 1):
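        # Only average years that fall within a decade of the dataset's most recent year.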
        if (max_date.year - year) > 10:
            continue
        beginning_of_month = datetime(year, month, 1)
        end_of_month = datetime(year, month,
                                calendar.monthrange(year, month)[1], 23, 59,
                                59)
        start = (pytz.UTC.localize(beginning_of_month) - EPOCH).total_seconds()
        end = (pytz.UTC.localize(end_of_month) - EPOCH).total_seconds()
        tile_stats = tile_service.find_tiles_in_polygon(
            bounding_polygon,
            ds,
            start,
            end,
            fl=('id,'
                'tile_avg_val_d,tile_count_i,'
                'tile_min_val_d,tile_max_val_d,'
                'tile_min_lat,tile_max_lat,'
                'tile_min_lon,tile_max_lon'),
            fetch_data=False)
        if len(tile_stats) == 0:
            continue

        print('calculate_monthly_average: Got {} tiles'.format(
            len(tile_stats)))
        # Split list into tiles on the border of the bounding box and tiles completely inside the bounding box.
        border_tiles, inner_tiles = [], []
        for tile in tile_stats:
            tile_box = shapely.geometry.box(tile.bbox.min_lon, tile.bbox.min_lat,
                                            tile.bbox.max_lon, tile.bbox.max_lat)
            if bounding_polygon.contains(tile_box):
                inner_tiles.append(tile)
            else:
                border_tiles.append(tile)

        # We can use the stats of the inner tiles directly
        tile_means = [tile.tile_stats.mean for tile in inner_tiles]
        tile_mins = [tile.tile_stats.min for tile in inner_tiles]
        tile_maxes = [tile.tile_stats.max for tile in inner_tiles]
        tile_counts = [tile.tile_stats.count for tile in inner_tiles]

        # Border tiles need to have their data loaded, masked, and their stats recalculated
        border_tiles = list(tile_service.fetch_data_for_tiles(*border_tiles))
        border_tiles = tile_service.mask_tiles_to_polygon(
            bounding_polygon, border_tiles)
        for tile in border_tiles:
            tile.update_stats()
            tile_means.append(tile.tile_stats.mean)
            tile_mins.append(tile.tile_stats.min)
            tile_maxes.append(tile.tile_stats.max)
            tile_counts.append(tile.tile_stats.count)

        tile_means = np.array(tile_means)
        tile_mins = np.array(tile_mins)
        tile_maxes = np.array(tile_maxes)
        tile_counts = np.array(tile_counts)

        sum_tile_counts = np.sum(tile_counts) * 1.0

        monthly_averages += [
            np.average(tile_means, None, tile_counts / sum_tile_counts).item()
        ]
        monthly_mins += [
            np.average(tile_mins, None, tile_counts / sum_tile_counts).item()
        ]
        monthly_maxes += [
            np.average(tile_maxes, None, tile_counts / sum_tile_counts).item()
        ]
        monthly_counts += [sum_tile_counts]

    count_sum = np.sum(monthly_counts) * 1.0
    weights = np.array(monthly_counts) / count_sum

    return np.average(monthly_averages, None, weights).item(), \
           np.average(monthly_mins, None, weights).item(), \
           np.average(monthly_maxes, None, weights).item()
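
Both the per-month and overall results in Example #3 are count-weighted means: each tile's (or month's) statistic contributes in proportion to how many observations it covers. Below is a minimal, self-contained sketch of that combination step with made-up numbers (illustrative only, not taken from any dataset):

import numpy as np

# Hypothetical per-tile statistics for a single month (illustrative values only).
tile_means = np.array([20.1, 19.7, 20.5])
tile_counts = np.array([120, 80, 200])

# Count-weighted mean: sum(mean_i * count_i) / sum(count_i).
weights = tile_counts / np.sum(tile_counts)
monthly_average = np.average(tile_means, None, weights).item()
print(monthly_average)  # 20.22, pulled toward the tile with the most observations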