예제 #1
0
    def __init__(self, start, finish, geo_json: dict, weighted=False):
        """Build a shape query from an already-parsed GeoJSON dict.

        Collects every ``Polygon`` feature of a ``FeatureCollection`` into a
        regionmask ``Regions_cls`` and derives the enclosing
        spatio-temporal query from the convex hull of those polygons.

        Args:
            start: start of the temporal range (passed to SpatioTemporalQuery).
            finish: end of the temporal range (passed to SpatioTemporalQuery).
            geo_json (dict): parsed GeoJSON; only inputs whose ``"type"`` is
                ``"FeatureCollection"`` contribute polygons.
            weighted: stored flag; presumably enables area-weighted masking
                downstream — TODO confirm against callers.
        """

        self.weighted = weighted
        self.geo_json = geo_json

        logger.debug(geo_json["type"])

        # A regionmask Object
        # A list of the Shapely.Polygons
        selections = list()
        count = 0
        numbers = []
        names = []
        abbrevs = []
        if geo_json["type"] == "FeatureCollection":
            logger.debug("Found a feature collection...")
            for p in geo_json["features"]:

                logger.debug("Found a Feature.")
                # Only plain Polygons are handled here (MultiPolygon
                # features are silently skipped).
                if p["geometry"]["type"] == "Polygon":
                    logger.debug("Found Polygon #%s" % count)
                    points = p["geometry"]["coordinates"]
                    # GeoJSON coordinates are [exterior, hole1, ...]; the
                    # unpacking maps them onto shapely's (shell, holes).
                    s = shapely.geometry.Polygon(*points)
                    selections.append(s)
                    numbers.append(count)
                    names.append("Selection_%s" % count)
                    abbrevs.append("SEL_%s" % count)

                    count += 1

        logger.debug("Making Region Mask with %s Polygons." % count)
        logger.debug("numbers: %s" % numbers)
        logger.debug("names: %s" % names)
        logger.debug("abbrevs: %s" % abbrevs)
        logger.debug("selections: %s" % selections)

        self.rmask = regionmask.Regions_cls(0, numbers, names, abbrevs,
                                            selections)

        self.selections = selections
        logger.debug(["%s" % sel for sel in selections])

        # Do once and store
        # self.mask = self.get_super_sampled_mask()  # TODO - remove default creation of mask and require setting the transform according to dataset projection

        hull = ShapeQuery.get_query_hull(self.rmask)
        # NOTE(review): shapely .bounds is (minx, miny, maxx, maxy); the
        # unpacking order below assumes an unusual axis layout — confirm
        # against SpatioTemporalQuery's expected argument order.
        lat1, lon2, lat2, lon1 = hull.bounds

        # logger.debug(lat1, lon1, lat2, lon2)

        self.spatio_temporal_query = SpatioTemporalQuery(
            lat1, lon1, lat2, lon2, start, finish)
        self.temporal = self.spatio_temporal_query.temporal
        self.spatial = self.spatio_temporal_query.spatial
        # Hard-coded affine transform for a 0.05-degree AU grid —
        # TODO derive from the dataset projection instead.
        self.transform = [0.05, 0.0, 111.975, 0.0, -0.05, -9.974999999999994]
        self.schema = ShapeQuerySchema()
예제 #2
0
    async def consolidate_year(self, y):
        """Mask one year of DFMC readings to the Australian landmass and
        re-save the result as a single NetCDF file.

        Loads a pickled Australia polygon, rasterises it as a regionmask
        region on the dataset grid, masks every timestep to that region and
        rewrites the year's file, keeping the previous file as ``.tmp``
        until the new write succeeds.

        Args:
            y (int): the year to consolidate.

        Returns:
            bool: True on success, False if writing the NetCDF failed.
        """
        with open(Model.path() + 'australia.pickle', 'rb') as pickled_australia:
            australia = pickle.load(pickled_australia)

        AUmask = regionmask.Regions_cls(
            'AUmask', [0], ['Australia'], ['AU'], [australia.polygon])

        # Single-iteration loop kept from the original structure (one year).
        for year in range(y, y + 1):
            with xr.open_mfdataset("%s%s_%s*" % (self.outputs['readings']['path'], self.outputs['readings']['prefix'], year), chunks={'time': 1}) as ds:
                ds['DFMC'] = ds['DFMC'].isel(observations=0, drop=True)
                # Rasterise the AU mask on the grid of the first timestep.
                dm = ds['DFMC'].isel(time=0)
                mask = AUmask.mask(dm['longitude'], dm['latitude'])
                mask_ma = np.ma.masked_invalid(mask)
                ds = ds.where(mask_ma == 0)
                logger.debug("--- Saving %s" % (year))
                ds.attrs = dict()
                ds.attrs['crs'] = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs "
                ds.attrs['var_name'] = 'DFMC'

                ds['time'].attrs['long_name'] = 'time'
                ds['time'].attrs['name'] = 'time'
                ds['time'].attrs['standard_name'] = 'time'

                ds['DFMC'].attrs['units'] = 'z-scores'
                ds['DFMC'].attrs[
                    'long_name'] = 'Dead Fine Fuels Moisture Content - (Percentage wet over dry by weight)'
                ds['DFMC'].attrs['name'] = self.outputs['readings']['prefix']
                ds['DFMC'].attrs['standard_name'] = self.outputs['readings']['prefix']

                # The source files are opened by this handle; close it first
                # (leave the `with`) and only then save, to avoid self-locking.

            tfile = "%s%s_%s.nc" % (self.path,
                                    self.outputs['readings']['prefix'],
                                    str(year))

            try:
                # Keep the old file as a backup until the new write succeeds.
                os.rename(tfile, tfile + '.tmp')
            except OSError as e:
                # BUG FIX: was `except e:` (NameError when triggered).
                # The target may simply not exist yet; log and continue.
                logger.error(e)

            try:
                ds.to_netcdf(tfile, mode='w', format='NETCDF4')
                os.remove(tfile + '.tmp')
            except Exception as e:
                logger.error(e)
                # BUG FIX: was `return false` (NameError).
                return False

        return True
예제 #3
0
def state_mask(state, ds, name, abbr):
    """Rasterise a single-state geodataframe onto a dataset's grid.

    Args:
       state (geodataframe): state to use
       ds (xarray.DataArray): DataArray with coords to match
       name (str): name of state
       abbr (str): abbreviation of state

    Returns:
        The gridded mask, shifted by +1 so cells inside the state are 1.
    """
    region = regionmask.Regions_cls('statemask', [0], [name], [abbr],
                                    state.geometry.values)
    return region.mask(ds, wrap_lon=True) + 1
예제 #4
0
def build_mask(dsbbox, mascon_gdf, dacoords, serialize=False, datadir=None):
    """
    Builds a mask defining the spatial units over which aggregation will
    occur. Clips the boundaries of the mask to the spatial domain of the
    underlying dataset.

    Parameters
    ----------
    dsbbox : list or tuple
        (minx, miny, maxx, maxy) extent of the dataset.
    mascon_gdf : geopandas.GeoDataFrame
        Mascon polygons; must provide a 'mascon' column.
    dacoords : xarray object
        Coordinates ('longitude'/'latitude') to rasterise the mask onto.
    serialize : bool
        If True, write the mask to ``gracemsk.nc`` under *datadir*.
    datadir : str or None
        Output directory, required when *serialize* is True.

    Returns
    -------
    (gracemsk, gpd_intersect)
        The rasterised mask and the clipped GeoDataFrame.

    This function currently not being called and should probably be
    decomissioned soon.
    """
    # Build a polygon around the extent of the LIS output so we can subset the GRACE data
    coords = [(dsbbox[0], dsbbox[1]), (dsbbox[0], dsbbox[3]),
              (dsbbox[2], dsbbox[3]), (dsbbox[2], dsbbox[1])]
    # Create a Shapely polygon from the coordinate-tuple list
    lispoly = Polygon(coords)
    df = pd.DataFrame.from_records([{'region_name': 'dataset'}])
    geometry = [lispoly]
    # BUG FIX: the CRS init string was 'epsg4326' (missing colon), which
    # pyproj rejects; 'epsg:4326' is the valid WGS84 identifier.
    regiondf = gpd.GeoDataFrame(df,
                                geometry=geometry,
                                crs={'init': 'epsg:4326'})

    # Clip the mascons to the dataset extent.
    gpd_intersect = gpd.overlay(regiondf, mascon_gdf, how='intersection')

    # Create mask
    numbers = gpd_intersect.index.values
    names = gpd_intersect['mascon'].values
    abbrevs = gpd_intersect['mascon'].values
    m = regionmask.Regions_cls('region_mask', numbers, names, abbrevs,
                               gpd_intersect.geometry.values)
    m.name = 'mascon'

    gracemsk = m.mask(dacoords, lon_name='longitude', lat_name='latitude')
    gracemsk.name = 'mask'

    if serialize:
        if datadir:
            fname = os.path.join(datadir, 'gracemsk.nc')
            with ProgressBar():
                print('Exporting {}'.format(fname))
                gracemsk.to_netcdf(fname)
        else:
            print('Need datadir to be specified.')
    return gracemsk, gpd_intersect
예제 #5
0
 def apply_mask_to(
         self, result_cube: xr.DataArray) -> (xr.DataArray, xr.DataArray):
     """Mask *result_cube* to the Australian landmass and attach a mask layer.

     Args:
         result_cube: dataset whose ``attrs['var_name']`` variable carries
             the data; must expose 'latitude'/'longitude' coords.

     Returns:
         Tuple of (masked result cube, its 'mask' DataArray).
     """
     rc = result_cube[result_cube.attrs['var_name']].isel(time=0)
     s = rc.shape
     # BUG FIX: the pickle file handle was never closed (resource leak);
     # a context manager releases it promptly.
     with open('./FuelModels/australia.pickle', 'rb') as pickled_aussie:
         australia = pickle.load(pickled_aussie)
     numbers = [0]
     name = ['Australia']
     abbrev = ['AU']
     AUmask = regionmask.Regions_cls('AUmask', numbers, name, abbrev,
                                     [australia.polygon])
     au_scaled_mask = AUmask.mask(rc['longitude'], rc['latitude'])
     # Cells outside Australia are NaN in the rasterised mask; keep only
     # cells whose region index is 0 (inside).
     au_masked = np.ma.masked_invalid(au_scaled_mask)
     result_cube = result_cube.where(au_masked == 0)
     # transform=None: the helper presumably derives the transform itself —
     # TODO confirm against get_super_sampled_mask.
     mask = self.get_super_sampled_mask(transform=None, data_shape=s)
     result_cube['mask'] = xr.DataArray(mask,
                                        coords=[
                                            result_cube['latitude'].data,
                                            result_cube['longitude'].data
                                        ],
                                        dims=['latitude', 'longitude'])
     return result_cube, result_cube['mask']
예제 #6
0
def build_mask(dsbbox, mascon_gdf, dacoords, serialize=False, datadir=None):
    """Rasterise mascon polygons, clipped to a dataset's bounding box.

    Parameters
    ----------
    dsbbox : list or tuple
        (minx, miny, maxx, maxy) extent of the dataset.
    mascon_gdf : geopandas.GeoDataFrame
        Mascon polygons; must provide a 'mascon' column.
    dacoords : xarray object
        Coordinates ('longitude'/'latitude') to rasterise the mask onto.
    serialize : bool
        If True, write the mask to ``gracemsk.nc`` under *datadir*.
    datadir : str or None
        Output directory, required when *serialize* is True.

    Returns
    -------
    (gracemsk, gpd_intersect)
    """
    # Build a polygon around the extent of the LIS output so we can subset the GRACE data
    coords = [(dsbbox[0], dsbbox[1]), (dsbbox[0], dsbbox[3]),
              (dsbbox[2], dsbbox[3]), (dsbbox[2], dsbbox[1])]
    # Create a Shapely polygon from the coordinate-tuple list
    lispoly = Polygon(coords)
    df = pd.DataFrame.from_records([{'region_name': 'dataset'}])
    geometry = [lispoly]
    # BUG FIX: the CRS init string was 'epsg4326' (missing colon), which
    # pyproj rejects; 'epsg:4326' is the valid WGS84 identifier.
    regiondf = gpd.GeoDataFrame(df,
                                geometry=geometry,
                                crs={'init': 'epsg:4326'})

    gpd_intersect = gpd.overlay(regiondf, mascon_gdf, how='intersection')

    # Create mask
    numbers = gpd_intersect.index.values
    names = gpd_intersect['mascon'].values
    abbrevs = gpd_intersect['mascon'].values
    m = regionmask.Regions_cls('region_mask', numbers, names, abbrevs,
                               gpd_intersect.geometry.values)
    m.name = 'mascon'

    gracemsk = m.mask(dacoords, lon_name='longitude', lat_name='latitude')
    gracemsk.name = 'mask'

    if serialize:
        if datadir:
            fname = os.path.join(datadir, 'gracemsk.nc')
            with ProgressBar():
                print('Exporting {}'.format(fname))
                gracemsk.to_netcdf(fname)
        else:
            print('Need datadir to be specified.')
    return gracemsk, gpd_intersect
예제 #7
0
def get_dra_mask(lons, lats):
    """Return the 'Data Release Area' (DRA) mask for the Central Arctic.

    The DRA was published by Rothrock et al., 2008; it is the area for
    which submarine draft data is available (1979-2000), and the Kwok
    papers use this area to show the trend extended by IceSat and CS-2
    data.

    Args:
        lons, lats: coordinate arrays of the target grid.

    Returns:
        numpy masked array: 1 inside the DRA, 0 outside.
    """
    # regionmask does not handle circular polygons around the North Pole
    # well, so the region is split into a western and an eastern polygon in
    # 0-360 longitudes; wrap_lon=True below remaps the input longitudes.
    # (An unused `lon360` precomputation was removed — wrap_lon covers it.)
    poly1 = [[360., 90.], [360., 87.], [345., 87.], [300., 86.58], [230., 80.],
             [219., 80.], [219., 70.], [205., 72.], [180., 74.], [180., 90.],
             [360., 90.]]

    poly2 = [[0., 86.], [0., 90.], [180., 90.], [180., 74.], [175., 75.50],
             [172., 78.50], [163., 80.50], [126., 78.50], [110., 84.33],
             [80., 84.42], [57., 85.17], [33., 83.83], [8., 84.08], [0., 86.]]

    numbers = [0, 1]
    names = ['Arctic_west', 'Arctic_east']
    abbrevs = ['Aw', 'Ae']
    Arctic_mask = regionmask.Regions_cls('DRA_mask', numbers, names, abbrevs,
                                         [poly1, poly2])

    # Make raster
    mask = Arctic_mask.mask(lons, lats, wrap_lon=True)
    # Merge the two sub-regions into a single binary mask.
    mask = np.where(mask >= 0, 1, 0)
    # pcolormesh does not handle NaNs, requires masked array
    age_mask = np.ma.masked_invalid(mask)

    return (age_mask)
예제 #8
0
    def apply_mask_to(self, result_cube: xr.DataArray) -> xr.DataArray:
        """Mask *result_cube* to Australia, then to this query's shape.

        First restricts the cube to the Australian landmass (Natural Earth
        50m outline), then applies either a weighted super-sampled mask or
        a binary inclusion mask built from ``self.rmask``.

        Args:
            result_cube: dataset whose ``attrs['var_name']`` names the data
                variable; must expose 'latitude'/'longitude' coords.

        Returns:
            The cube with cells outside the selection set to NaN.
        """
        rc = result_cube[result_cube.attrs['var_name']].isel(time=0)
        s = rc.shape
        logger.debug(s)

        australia = regionmask.defined_regions.natural_earth.countries_50.__getitem__(
            'Australia')
        numbers = [0]
        name = ['Australia']
        abbrev = ['AU']
        AUmask = regionmask.Regions_cls('AUmask', numbers, name, abbrev,
                                        [australia.polygon])
        au_scaled_mask = AUmask.mask(rc['longitude'], rc['latitude'])

        logger.debug('All NaN? %s', np.all(np.isnan(au_scaled_mask)))
        # Cells outside Australia are NaN in the rasterised mask; keep only
        # cells whose region index is 0 (inside).
        au_masked = np.ma.masked_invalid(au_scaled_mask)
        result_cube = result_cube.where(au_masked == 0)

        # DEBUGGING!! — forces the binary-mask branch below regardless of
        # the configured flag; remove to honour self.weighted.
        self.weighted = False

        if self.weighted:
            # Apply Spatial Mask on every timeslice.
            # TODO - DEBUG THIS - use the affine / transform of the resultcube to create and project the mask!!!
            mask = self.get_super_sampled_mask(
                transform=[0.05, 0.0, 111.975, 0.0, -0.05, -9.974999999999994],
                data_shape=s)
            logger.debug(mask)
            # Broadcast the 2-D mask across the time dimension.
            mask3d = np.broadcast_to(
                mask != 0, result_cube[result_cube.attrs['var_name']].shape)

            # Weight each cell by its mask coverage (mask values presumably
            # 0-255 coverage counts — TODO confirm get_super_sampled_mask).
            fuel_moistures = result_cube.where(mask3d != 0) * (mask3d / 255)

            logger.debug("Masked by weighted area!")

        else:

            # # Minimal case - shape is entirely contained in just one cell...
            # # TODO - remove hard-coded spatial resolution
            # if result_cube['longitude'].max() - result_cube['longitude'].min() <= 0.05 and \
            #         result_cube['latitude'].max() - result_cube['latitude'].min() <= 0.05:
            #     # single latitude
            # else:
            # Binary Masking
            mask = self.rmask.mask(result_cube['longitude'],
                                   result_cube['latitude'],
                                   xarray=True)
            mask = mask.rename({'lat': 'latitude', 'lon': 'longitude'})
            region = np.ma.filled(mask.astype(float), np.nan)

            # Set all values in the mask layer to either zero or NaN
            # Otherwise the selection indexes will determine the results
            region = region * 0

            # Can then use WHERE to get Binary True/False masking for all parts
            # of the selection as a unified group...
            fuel_moistures = result_cube.where(region == 0)
            logger.debug("Masked by binary inclusion: ")

        # logger.debug('All NaN?', np.all(np.isnan(fuel_moistures.data)))

        return fuel_moistures
예제 #9
0
# Open one year of surface files for the model as a single lazy dataset.
data = t_in + model + '_2018*_sfc.nc'
ds = xr.open_mfdataset(data, chunks = {'time': 10})

# With the function assign_coords the longitude is converted from the 0-360 range to -180,180 
ds = ds.assign_coords(longitude=(((ds.longitude + 180) % 360) - 180)).sortby('longitude')

############################## PART TO MASK THE WHOLE AFRICA CONTINENT 
#http://datapages.com/gis-map-publishing-program/gis-open-files/global-framework/global-heat-flow-database/shapefiles-list

# Load the shapefile
PATH_TO_SHAPEFILE = './continent_shapefile/continent.shp'
nuts = gpd.read_file(PATH_TO_SHAPEFILE)
# Notebook-style display of the loaded GeoDataFrame (no effect in a script).
nuts

# CALCULATE MASK
# One region per continent row (8 rows assumed in the shapefile).
nuts_mask_poly = regionmask.Regions_cls(name = 'nuts_mask', numbers = list(range(0,8)), names = list(nuts.CONTINENT), abbrevs = list(nuts.CONTINENT), outlines = list(nuts.geometry.values[i] for i in range(0,8)))
print(nuts_mask_poly)

# Rasterise on a subset roughly covering Africa (lat 39..-38, lon -25..55).
mask = nuts_mask_poly.mask(ds.isel(time = 0).sel(latitude = slice(39, -38), longitude = slice(-25, 55)), lat_name='latitude', lon_name='longitude')
# Notebook-style display of the mask (no effect in a script).
mask
mask.to_netcdf('./mask_all_continent.nc') 

# Quick visual check: mask raster with continent outlines overlaid.
plt.figure(figsize=(12,8))
ax = plt.axes()
mask.plot(ax = ax)
nuts.plot(ax = ax, alpha = 0.8, facecolor = 'none', lw = 1)

# Index into the continent table (3 presumably selects one continent row —
# verify against the shapefile's row order).
ID_COUNTRY = 3
print(nuts.CONTINENT[ID_COUNTRY])
lat = mask.latitude.values
lon = mask.longitude.values
예제 #10
0
import matplotlib as mpl

# Load per-country polygons for Africa.
PATH_TO_SHAPEFILE = './shapefiles/Africa_Countries.shp'
countries = gpd.read_file(PATH_TO_SHAPEFILE)
countries.head()
# First row index of each distinct country code (a country may span
# multiple rows in the shapefile).
my_list = list(countries['CODE'])
my_list_unique = set(list(countries['CODE']))
indexes = [my_list.index(x) for x in my_list_unique]
model = 'D:/Utilisateurs/guillaume/Desktop/PROJET_AFR44/ARC2/netcdf/ARC2_198901.nc'
data = model
ds = xr.open_mfdataset(data, chunks={'time': 10})

# Build one region per country. NOTE: `numbers`/`names` use one row per
# unique code, while `outlines` includes every row of the shapefile.
countries_mask_poly = regionmask.Regions_cls(
    name='COUNTRY',
    numbers=indexes,
    names=countries.COUNTRY[indexes],
    abbrevs=countries.COUNTRY[indexes],
    outlines=list(countries.geometry.values[i]
                  for i in range(0, countries.shape[0])))

mask = countries_mask_poly.mask(ds.isel(time=0),
                                lat_name='lat',
                                lon_name='lon')
mask.to_netcdf('./mask_ARC2_by_countries.nc')

# x/x normalises the selected country's cells to 1 (NaN elsewhere).
# 758 presumably is Uganda's region number — verify against the shapefile.
mask_ouganda = mask.where(mask == 758) / mask.where(mask == 758)
mask_ouganda.to_netcdf('./mask_ARC2_Ouganda.nc')

# Count of grid cells inside the country.
np_points = (mask_ouganda.values == 1).sum()

plt.figure(figsize=(16, 8))
예제 #11
0
def get_poly_mask(lons, lats):
    """Build a Central Arctic mask from two hand-digitised polygons.

    Creates a geographical polygon for the Central Arctic (excluding the
    narrow band off the CAA), split into a western and an eastern polygon
    because regionmask does not handle circular polygons around the North
    Pole well. Corner points are sampled from the input coordinate grids
    at (i, j) indices found by exploring the data in ncview.

    Args:
        lons, lats: 2-D coordinate arrays of the grid being masked; the
            hard-coded (i, j) indices below assume this specific grid.

    Returns:
        numpy masked array: 1 inside the Central Arctic, 0 outside.
    """
    #create a geographical polygon for the Central Arctic (without the narrow band off the CAA)
    #https://regionmask.readthedocs.io/en/stable/_static/notebooks/create_own_regions.html
    #make two masks - one for W and one for E Arctic

    #regionmask does not handle well the circular polygons around the NP
    lon360 = np.where(lons < 0, 360 + lons, lons)
    #print(lon360)

    #i,j coordinates of corner points can be found by exploring display in ncview
    #W Arctic
    poly1 = []
    pt = [360, 90]
    poly1.append(pt)
    pt = [360, lats[273, 115]]
    poly1.append(pt)
    pt = [lon360[273, 115], lats[273, 115]]
    poly1.append(pt)

    pt = [lon360[260, 128], lats[260, 128]]
    poly1.append(pt)
    pt = [lon360[239, 136], lats[239, 136]]
    poly1.append(pt)
    pt = [lon360[228, 145], lats[228, 145]]
    poly1.append(pt)
    pt = [lon360[210, 148], lats[210, 148]]
    poly1.append(pt)

    pt = [lon360[194, 147], lats[194, 147]]
    poly1.append(pt)
    pt = [lon360[157, 156], lats[157, 156]]
    poly1.append(pt)
    pt = [lon360[113, 174], lats[113, 174]]
    poly1.append(pt)
    pt = [lon360[89, 157], lats[89, 157]]
    poly1.append(pt)
    pt = [lon360[29, 123], lats[29, 123]]
    poly1.append(pt)

    ##more radical (even further away from the CAA coast)
    #pt = [lon360[260,132],lats[260,132]];poly1.append(pt)
    #pt = [lon360[239,140],lats[239,140]];poly1.append(pt)
    #pt = [lon360[228,149],lats[228,149]];poly1.append(pt)
    #pt = [lon360[210,152],lats[210,152]];poly1.append(pt)

    #pt = [lon360[194,151],lats[194,151]];poly1.append(pt)
    #pt = [lon360[157,160],lats[157,160]];poly1.append(pt)
    #pt = [lon360[113,178],lats[113,178]];poly1.append(pt)
    #pt = [lon360[65,160],lats[65,160]];poly1.append(pt)
    #pt = [lon360[24,162],lats[29,162]];poly1.append(pt)

    pt = [lon360[3, 194], lats[3, 194]]
    poly1.append(pt)
    pt = [lon360[3, 344], lats[3, 344]]
    poly1.append(pt)
    pt = [180, 65]
    poly1.append(pt)
    pt = [180, 90]
    poly1.append(pt)
    pt = [270, 90]
    poly1.append(pt)
    pt = [360, 90]
    poly1.append(pt)
    #print(poly1)

    #E Arctic
    poly2 = []
    pt = [0, 90]
    poly2.append(pt)
    pt = [90, 90]
    poly2.append(pt)
    pt = [180, 90]
    poly2.append(pt)
    pt = [180, 65]
    poly2.append(pt)
    pt = [lon360[135, 386], lats[135, 386]]
    poly2.append(pt)
    pt = [lon360[238, 390], lats[238, 390]]
    poly2.append(pt)
    pt = [lon360[310, 344], lats[310, 344]]
    poly2.append(pt)
    pt = [lon360[449, 301], lats[449, 301]]
    poly2.append(pt)
    pt = [lon360[350, 122], lats[350, 122]]
    poly2.append(pt)
    pt = [0, lats[273, 115]]
    poly2.append(pt)
    pt = [0, 90]
    poly2.append(pt)
    #print(poly2)

    numbers = [0, 1]
    names = ['Arctic_west', 'Arctic_east']
    abbrevs = ['Aw', 'Ae']
    Arctic_mask = regionmask.Regions_cls('Arctic_mask', numbers, names,
                                         abbrevs, [poly1, poly2])

    ##Plot polygons in Mercator projection
    #ax=Arctic_mask.plot()
    #ax.set_extent([-180, 180, 45, 90], ccrs.PlateCarree())
    #plt.show()

    #Make raster
    mask = Arctic_mask.mask(lons, lats, wrap_lon=True)
    #Merge mask: collapse the two sub-regions into a single binary mask.
    mask = np.where(mask >= 0, 1, 0)
    # pcolormesh does not handle NaNs, requires masked array
    age_mask = np.ma.masked_invalid(mask)

    ##Plot mask
    #outpath_plots = 'plots/run04/'
    #outname = outpath_plots+'age_mask_rest.png'
    #plot_pcolormesh(lons,lats,age_mask,outname,cmap='viridis',label='Central Arctic Mask=1')
    #exit()

    return (age_mask)
예제 #12
0
 def getRegion( self, shape: gpd.GeoDataFrame, name_col: str, poly_index, buffer: float = 0.0 ) -> Tuple[Polygon,Regions_cls]:
     """Build a single-polygon regionmask region from one row of *shape*.

     Args:
         shape: source GeoDataFrame.
         name_col: column holding the region names.
         poly_index: positional index of the polygon row to use.
         buffer: optional buffer distance applied to the polygon
             (only applied when > 0).

     Returns:
         The (possibly buffered) 2-D polygon and a Regions_cls built
         around it.
     """
     # Annotation corrected: this is a one-element list of names, not a str.
     poly_name: list = [ shape[name_col].tolist()[poly_index] ]
     # Drop any Z coordinate so downstream masking sees a 2-D polygon.
     poly: Polygon = self.remove_third_dimension( shape.geometry.values[poly_index] )
     if buffer > 0: poly = poly.buffer( buffer )
     print( f" Creating region for polygon with bounds = {poly.envelope.bounds}" )
     return poly, regionmask.Regions_cls( poly_name, [0], [poly_name], [poly_name], [poly] )
예제 #13
0
strlist = ['1', '1']
intlist = [1, 1]

polygons = [Polygon(cham_geom), Polygon(neighbor_geom)]
boundary = cascaded_union(polygons)  # results in multipolygon sometimes
if shape_mask.geom_type == 'MultiPolygon':
    # extract polygons out of multipolygon
    # BUG FIX: this list was previously bound to the name ``list``, which
    # shadowed the builtin and made every list(...) call below raise a
    # TypeError whenever this branch ran.
    boundary_polygons = []
    for polygon in boundary:
        boundary_polygons.append(polygon)

# NOTE(review): mask_poly is immediately overwritten by the second
# Regions_cls call below, so this first construction is effectively dead.
mask_poly = regionmask.Regions_cls(
    name='FID_BV_Out',
    numbers=intlist,
    names=strlist,
    abbrevs=strlist,
    outlines=list(shape_mask.geometry.values[i]
                  for i in range(0, len(shape_mask.FID_BV_Out))))
mask_poly = regionmask.Regions_cls(name='FID_BV_Out',
                                   numbers=intlist,
                                   names=strlist,
                                   abbrevs=strlist,
                                   outlines=list(shape_mask.geometry[0])[1])

# NOTE(review): the next two statements compute values that are never used
# afterwards (notebook-style inspection leftovers).
outlines = list(shape_mask.geometry.values[i]
                for i in range(0, len(shape_mask.FID_BV_Out)))
list(shape_mask.geometry[0])[1]

mask = mask_poly.mask(ds.isel(time=0), lat_name='lat', lon_name='lon')
# obtain path and file name information of all *.nc files in '~/ConUS/Climate'
for dirpath, dirnames, filenames in os.walk(
        "/data/rajagopalan/MC2/Harddrive_MC2/ConUS/Climate"):
    for filename in [f for f in filenames if f.endswith("ppt.nc")]:
        f_d.append(os.path.join(dirpath, filename))
all_f = len(f_d)
# loop over all of these *.nc files
for file in range(0, all_f):
    ncf = xr.open_dataset(f_d[file])
    print(ncf)
    yr_len = len(ncf.time)
    lat_n = len(ncf.lat)
    lon_n = len(ncf.lon)
    rb_mask_poly = regionmask.Regions_cls(
        name='Rangelands Distrcits',
        numbers=list(range(0, rb_len)),
        names=list(rb.DISTRICTNA),
        abbrevs=list(rb.DISTRICTOR),
        outlines=list(rb.geometry.values[i] for i in range(0, rb_len)))
    mask = rb_mask_poly.mask(ncf.isel(lat=slice(0, lat_n), lon=slice(0,
                                                                     lon_n)),
                             lat_name='lat',
                             lon_name='lon')
    # public values
    lat = mask.lat.values
    lon = mask.lon.values
    v_m = []
    # a tentitative run to find out the deadspots
    for r_d in range(0, rb_len):
        print("Rangeland district ID", r_d)
        print("Rangeland district name", rb.DISTRICTOR[r_d])
        try:
예제 #15
0
    def __init__(self, start, finish, geo_json, weighted=False):
        """Build a shape query from GeoJSON given as str, dict or
        geojson FeatureCollection.

        Collects every Polygon/MultiPolygon feature into a regionmask
        Regions object and accumulates the overall bounding box used for
        the spatio-temporal query.

        Args:
            start: start of the temporal range (passed to SpatioTemporalQuery).
            finish: end of the temporal range (passed to SpatioTemporalQuery).
            geo_json: GeoJSON as a str, a plain dict, or a
                geojson.feature.FeatureCollection.
            weighted: stored flag; presumably enables area-weighted masking
                downstream — TODO confirm against callers.

        Raises:
            ValueError: if *geo_json* is of an unsupported type.
        """

        self.weighted = weighted

        # Normalise the input to a geojson FeatureCollection. isinstance
        # replaces the previous exact type() comparisons; the
        # FeatureCollection check runs before the dict check because
        # FeatureCollection is itself a dict subclass.
        if isinstance(geo_json, geojson.feature.FeatureCollection):
            self.geo_json = geo_json
        elif isinstance(geo_json, str):
            self.geo_json = geojson.loads(geo_json)
        elif isinstance(geo_json, dict):
            # Round-trip through geojson to obtain a FeatureCollection.
            self.geo_json = geojson.loads(geojson.dumps(geo_json))
        else:
            logger.debug("Couldn't parse GeoJSON.")
            # BUG FIX: str() avoids a TypeError when geo_json is not a str.
            raise ValueError("Couldn't parse GeoJSON: " + str(geo_json))

        # Bounding box accumulators, seeded with their extreme opposites.
        lon1 = 180
        lon2 = -180
        lat1 = 90
        lat2 = -90

        # A regionmask Regions object will be built from these shapely
        # geometries.
        selections = list()
        count = 0
        numbers = []
        names = []
        abbrevs = []

        for p in self.geo_json["features"]:

            # Grow the overall bounding box to cover this feature.
            min_lon, min_lat, max_lon, max_lat = self.bbox(p)

            lat1 = min(min_lat, lat1)
            lat2 = max(max_lat, lat2)
            lon1 = min(min_lon, lon1)
            lon2 = max(max_lon, lon2)

            logger.debug("Found a Feature.")
            if p["geometry"]["type"] == "Polygon" or p["geometry"][
                    "type"] == "MultiPolygon":
                logger.debug("Found Polygon/MultiPolygon #%s" % count)
                s = shape(p["geometry"])
                selections.append(s)
                numbers.append(count)
                names.append("Selection_%s" % count)
                abbrevs.append("SEL_%s" % count)
                count += 1

        logger.debug("Making Region Mask with %s Polygons." % count)
        logger.debug("numbers: %s" % numbers)
        logger.debug("names: %s" % names)
        logger.debug("abbrevs: %s" % abbrevs)
        logger.debug("selections: %s" % selections)

        self.rmask = regionmask.Regions_cls(0, numbers, names, abbrevs,
                                            selections)

        self.selections = selections
        logger.debug(["%s" % sel for sel in selections])

        # Do once and store
        # self.mask = self.get_super_sampled_mask()  # TODO - remove default creation of mask and require setting the transform according to dataset projection

        # hull = ShapeQuery.get_query_hull(self.rmask)
        # lat1, lon2, lat2, lon1 = hull.bounds

        self.spatio_temporal_query = SpatioTemporalQuery(
            lat1, lon1, lat2, lon2, start, finish)
        self.temporal = self.spatio_temporal_query.temporal
        self.spatial = self.spatio_temporal_query.spatial
        # Hard-coded affine transform for a 0.05-degree AU grid —
        # TODO derive from the dataset projection instead.
        self.transform = [0.05, 0.0, 111.975, 0.0, -0.05, -9.974999999999994]
        self.schema = ShapeQuerySchema()
예제 #16
0
    # format data into timeseries
    # ----------------------------------
    # NOTE(review): the first two assignments are immediately overwritten by
    # the next two — they look like an earlier, wider region kept for
    # reference. Only the second set of bounds is in effect.
    lat_min1, lat_max1, lon_min1, lon_max1 = 70, 80, 90, 160
    lat_min2, lat_max2, lon_min2, lon_max2 = 80, 87, 90, 170
    lat_min1, lat_max1, lon_min1, lon_max1 = 79, 80.4, 112, 140
    lat_min2, lat_max2, lon_min2, lon_max2 = 80.45, 87, 128, 155

    # Two rectangular regions of interest over the Laptev Sea.
    roi1 = [[lon_min1, lat_min1], [lon_max1, lat_min1], [lon_max1, lat_max1],
            [lon_min1, lat_max1]]
    roi2 = [[lon_min2, lat_min2], [lon_max2, lat_min2], [lon_max2, lat_max2],
            [lon_min2, lat_max2]]
    roi = [roi1, roi2]
    # NOTE(review): `id` shadows the builtin within this scope.
    id = [0, 1]
    name = ['roi_laptev', 'roi_laptev_north']
    abbrev = ['roi1', 'roi2']
    # Rasterise the ROIs on both the aerosol and the ice grids.
    mask = regionmask.Regions_cls('roi', id, name, abbrev, roi)
    mask_ = mask.mask(ds, lon_name='longitude', lat_name='latitude')
    mask_ice = mask.mask(ds_ice, lon_name='longitude', lat_name='latitude')

    # ----- aod
    # ts_aod = ds.aod550.mean(dim=('latitude','longitude'))
    # Mean and interquartile range of AOD inside each ROI (region index 0/1).
    aod = ds.aod550.where(mask_ == 0)
    ts_aod_roi1 = aod.mean(dim=('latitude', 'longitude'))
    ts_aod25_roi1 = aod.quantile(0.25, dim=('latitude', 'longitude'))
    ts_aod75_roi1 = aod.quantile(0.75, dim=('latitude', 'longitude'))
    aod = ds.aod550.where(mask_ == 1)
    ts_aod25_roi2 = aod.quantile(0.25, dim=('latitude', 'longitude'))
    ts_aod75_roi2 = aod.quantile(0.75, dim=('latitude', 'longitude'))
    ts_aod_roi2 = aod.mean(dim=('latitude', 'longitude'))

    # ----- ice
예제 #17
0
 def getRegions( self, region_name: str, name_col: str, shape: gpd.GeoDataFrame, poly_index: int = 6 ) -> Regions_cls:
     """Build a single-polygon regionmask region from one row of *shape*.

     Args:
         region_name: name given to the resulting Regions_cls.
         name_col: column of *shape* holding the region names.
         shape: source GeoDataFrame.
         poly_index: positional index of the polygon row to use
             (generalised from the previously hard-coded value; defaults
             to 6 for backward compatibility).

     Returns:
         A Regions_cls wrapping the selected (flattened to 2-D) polygon.
     """
     names = [ shape[name_col].tolist()[poly_index] ]
     # Drop any Z coordinate so downstream masking sees a 2-D polygon.
     poly: Polygon = self.remove_third_dimension( shape.geometry.values[poly_index] )
     print( f" Creating region for polygon with bounds = {poly.envelope.bounds}" )
     return regionmask.Regions_cls( region_name, list(range(len(names))), names, names, [poly] )