def get_crs(ds: nc.Dataset) -> CRS:
    attrs = ds.__dict__ # type: dict
    proj_id = attrs['MAP_PROJ']

    if proj_id == ProjectionTypes.LAT_LON:
        pole_lat = attrs['POLE_LAT']
        pole_lon = attrs['POLE_LON']
        if pole_lat != 90.0 or pole_lon != 0.0:
            raise UnsupportedError('Geographic coordinate system with rotated pole is not supported')
        crs = CRS.create_lonlat()
    elif proj_id == ProjectionTypes.LAMBERT_CONFORMAL:
        crs = CRS.create_lambert(
            truelat1=attrs['TRUELAT1'],
            truelat2=attrs['TRUELAT2'],
            origin=LonLat(lon=attrs['STAND_LON'], lat=attrs['MOAD_CEN_LAT']))
    elif proj_id == ProjectionTypes.MERCATOR:
        crs = CRS.create_mercator(
            truelat1=attrs['TRUELAT1'],
            origin_lon=attrs['STAND_LON'])
    elif proj_id == ProjectionTypes.POLAR_STEREOGRAPHIC:
        crs = CRS.create_polar(
            truelat1=attrs['TRUELAT1'],
            origin_lon=attrs['STAND_LON'])
    else:
        raise UnsupportedError('Projection {} is not supported'.format(proj_id))
    return crs
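
# Illustrative sketch (not part of the original module): get_crs would
# typically be applied to an opened WRF NetCDF file, e.g. a geo_em or
# wrfout file. The path below is hypothetical.
def _example_get_crs(path: str='geo_em.d01.nc') -> CRS:
    ds = nc.Dataset(path)
    try:
        return get_crs(ds)
    finally:
        ds.close()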
def find_mpiexec() -> str:
    plat = platform.system()
    paths = []
    if plat == 'Windows':
        help_option = []
        if 'MSMPI_BIN' in os.environ:
            paths.append(os.path.join(os.environ['MSMPI_BIN'], 'mpiexec.exe'))
    elif plat in ['Darwin', 'Linux']:
        help_option = ['-h']
        paths.append('mpiexec')
        # Sometimes /usr/local/bin is not in PATH.
        paths.append('/usr/local/bin/mpiexec')
    else:
        raise UnsupportedError(f'Platform "{plat}" is not supported')
    mpiexec_path = None
    for path in paths:
        try:
            subprocess.check_output([path] + help_option, startupinfo=STARTUPINFO)
        except FileNotFoundError:
            pass
        else:
            mpiexec_path = path
            break
    if mpiexec_path is None:
        raise UserError('MPI not found')
    return mpiexec_path
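
# Illustrative sketch (not part of the original module): once located, the
# mpiexec path can be used to launch an MPI program. The executable name and
# process count below are hypothetical.
def _example_run_with_mpi() -> None:
    mpiexec = find_mpiexec()
    subprocess.check_call([mpiexec, '-n', '4', './wrf.exe'],
                          startupinfo=STARTUPINFO)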
def get_dist_url(dist: dict, mpi: bool) -> str:
    try:
        os_dist = dist[platform.system()]
    except KeyError:
        raise UnsupportedError(
            'Pre-compiled distributions are not available for your operating system')
    if mpi:
        try:
            url = os_dist['dmpar']
        except KeyError:
            assert 'serial' in os_dist
            raise UnsupportedError(
                'A pre-compiled MPI distribution is not available for your operating system')
    else:
        try:
            url = os_dist['serial']
        except KeyError:
            assert 'dmpar' in os_dist
            raise UnsupportedError(
                'A pre-compiled non-MPI distribution is not available for your operating system')
    return url
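
# Illustrative sketch (not part of the original module): the expected shape of
# the `dist` mapping, keyed by platform.system() name and then by build type.
# The URLs are placeholders.
def _example_get_dist_url() -> str:
    dist = {
        'Windows': {
            'serial': 'https://example.com/wrf-windows-serial.zip',
            'dmpar': 'https://example.com/wrf-windows-dmpar.zip',
        },
        'Linux': {
            'serial': 'https://example.com/wrf-linux-serial.tar.gz',
        },
    }
    # On Linux this would raise UnsupportedError, since only a serial
    # build is listed above.
    return get_dist_url(dist, mpi=True)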
def landmask_water(self) -> List[int]:
    assert self.is_landuse
    water = set() # type: Set[int]
    fields = [self.iswater, self.islake]
    for val in fields:
        if val is not None:
            water.add(val)
    scheme = self.landuse_scheme_or_default
    if scheme == 'USGS':
        water.add(16)
    elif scheme == 'MODIFIED_IGBP_MODIS_NOAH':
        water.add(17)
    else:
        raise UnsupportedError(f'Land use scheme {scheme} is not supported')
    return sorted(water)
def convert_wps_binary_to_vrt_dataset(folder: str, use_vsi: bool=False
                                      ) -> Tuple[str, str, str, Callable[[], None]]:
    """Converts a WPS Binary format dataset into a mosaic VRT dataset
    referencing per-tile VRT datasets."""
    m = read_wps_binary_index_file(folder)

    if m.proj_id == 'regular_ll' and m.stdlon is not None:
        raise UnsupportedError('Rotated pole system is not supported')

    # scan folder for available tiles
    tile_filename_re = re.compile(r'^({d})-({d})\.({d})-({d})$'.format(
        d=r'\d{' + str(m.filename_digits) + '}'))
    tiles = []
    for filename in os.listdir(folder):
        match = tile_filename_re.match(filename)
        if match:
            tiles.append({
                'filename': filename,
                'path': os.path.join(folder, filename),
                'start_x': int(match.group(1)),
                'end_x': int(match.group(2)),
                'start_y': int(match.group(3)),
                'end_y': int(match.group(4))
            })
    if not tiles:
        raise UserError(f'No tiles found in {folder}')

    # determine raster dimensions
    xsize = max(tile['end_x'] for tile in tiles) # type: int
    ysize = max(tile['end_y'] for tile in tiles) # type: int
    zsize = m.tile_z_end - m.tile_z_start + 1

    # convert to GDAL metadata
    dtype_mapping = {
        (1, False): gdal.GDT_Byte, # GDAL only supports unsigned byte
        (2, False): gdal.GDT_UInt16,
        (2, True): gdal.GDT_Int16,
        (3, False): gdal.GDT_UInt32,
        (3, True): gdal.GDT_Int32
    }
    try:
        dtype = dtype_mapping[(m.word_size, m.signed)]
    except KeyError:
        raise UnsupportedError('word_size={} signed={} is not supported'.format(
            m.word_size, m.signed))

    if m.proj_id == 'regular_ll':
        crs = CRS.create_lonlat()
    elif m.proj_id == 'lambert':
        # The map distortion of a Lambert Conformal projection is fully
        # defined by the two true latitudes.
        #
        # However, the longitude of origin is important for WRF as well,
        # since we only deal with upright rectangles (the domains) on the map.
        # For that reason, WRF allows the user to define the "standard longitude"
        # which is the longitude of origin.
        #
        # The latitude of origin on the other hand does not have any significance
        # here and cannot be specified by the user. The geo transform for a given
        # grid is computed based on any arbitrary latitude of origin (see below).
        # In QGIS, the only difference is the displayed projected y coordinates,
        # but the actual grid georeferencing is unaffected.
        # This is possible as WRF's georeferencing metadata is based on geographical
        # reference coordinates for a grid cell, not projected coordinates.
        arbitrary_latitude_origin = (m.truelat1 + m.truelat2) / 2
        origin = LonLat(lon=m.stdlon, lat=arbitrary_latitude_origin)
        crs = CRS.create_lambert(m.truelat1, m.truelat2, origin)
    elif m.proj_id == 'mercator':
        # The map distortion of a Mercator projection is fully
        # defined by the true latitude.
        # The longitude of origin does not have any significance and
        # any arbitrary value is handled when computing the geo transform
        # for a given grid (see below). See also the comment above for Lambert.
        arbitrary_longitude_origin = 0
        crs = CRS.create_mercator(m.truelat1, arbitrary_longitude_origin)
    elif m.proj_id == 'albers_nad83':
        # See the comment above for Lambert. The same applies here.
        arbitrary_latitude_origin = (m.truelat1 + m.truelat2) / 2
        origin = LonLat(lon=m.stdlon, lat=arbitrary_latitude_origin)
        crs = CRS.create_albers_nad83(m.truelat1, m.truelat2, origin)
    # FIXME handle polar vs polar_wgs84 differently
    elif m.proj_id == 'polar':
        # See the comment above for Lambert. The same applies here.
        crs = CRS.create_polar(m.truelat1, m.stdlon)
    elif m.proj_id == 'polar_wgs84':
        # See the comment above for Lambert. The same applies here.
        crs = CRS.create_polar(m.truelat1, m.stdlon)
    else:
        raise UnsupportedError(f'Projection {m.proj_id} is not supported')

    known_x_idx_gdal = m.known_idx.x - 0.5
    if m.top_bottom:
        known_y_idx_gdal = ysize - m.known_idx.y - 0.5
        dy_gdal = -m.dy
    else:
        known_y_idx_gdal = m.known_idx.y - 0.5
        dy_gdal = m.dy
    known_xy = crs.to_xy(m.known_lonlat)
    upper_left_x = known_xy.x - known_x_idx_gdal * m.dx
    upper_left_y = known_xy.y + known_y_idx_gdal * m.dy
    geo_transform = (upper_left_x, m.dx, 0, upper_left_y, 0, dy_gdal)

    # VRTRawRasterBand metadata
    line_width = m.word_size * (m.tile_x + m.tile_bdr * 2) # x size incl. border
    tile_size = line_width * (m.tile_y + m.tile_bdr * 2) # tile size incl. border
    line_offset = line_width
    image_offset = m.tile_bdr * line_width + m.tile_bdr * m.word_size
    pixel_offset = m.word_size
    byte_order = 'LSB' if m.little_endian else 'MSB'

    # create tile VRTs
    if use_vsi:
        out_dir = get_temp_vsi_path(ext='')
    else:
        out_dir = get_temp_dir()
    driver = gdal.GetDriverByName('VRT') # type: gdal.Driver
    tile_vrt_paths = {}
    for tile in tiles:
        vsi_path = '{}/{}.vrt'.format(out_dir, tile['filename'])
        vrt = driver.Create(vsi_path, m.tile_x, m.tile_y, 0) # type: gdal.Dataset
        for z in range(m.tile_z_start - 1, m.tile_z_end):
            options = [
                'subClass=VRTRawRasterBand',
                'SourceFilename={}'.format(tile['path']),
                'relativeToVRT=0',
                'ImageOffset={}'.format(z * tile_size + image_offset),
                'PixelOffset={}'.format(pixel_offset),
                'LineOffset={}'.format(line_offset),
                'ByteOrder=' + byte_order
            ]
            vrt.AddBand(dtype, options)
        vrt.FlushCache()
        tile_vrt_paths[tile['filename']] = vsi_path

    # create mosaic VRT
    mosaic_vrt_path = '{}/mosaic.vrt'.format(out_dir)
    vrt = driver.Create(mosaic_vrt_path, xsize, ysize, zsize, dtype) # type: gdal.Dataset
    vrt.SetProjection(crs.proj4)
    vrt.SetGeoTransform(geo_transform)
    if m.categorical:
        color_table, cat_names = get_gdal_categories(m.categories, m.category_min, m.category_max)
    for band_idx in range(1, zsize + 1):
        band = vrt.GetRasterBand(band_idx) # type: gdal.Band
        if m.missing_value is not None:
            band.SetNoDataValue(m.missing_value)
        band.SetScale(m.scale_factor)
        if m.categorical:
            band.SetRasterColorInterpretation(gdal.GCI_PaletteIndex)
            band.SetRasterColorTable(color_table)
            band.SetRasterCategoryNames(cat_names)
        sources = {}
        for idx, tile in enumerate(tiles):
            tile_vrt_path = tile_vrt_paths[tile['filename']]
            if m.top_bottom:
                end_y = ysize - tile['start_y'] - 1
                start_y = end_y - m.tile_y + 1
            else:
                start_y = tile['start_y'] - 1
            sources['source_{}'.format(idx)] = ('''
                <SimpleSource>
                    <SourceFilename relativeToVRT="0">{path}</SourceFilename>
                    <SourceBand>{band}</SourceBand>
                    <SrcRect xOff="0" yOff="0" xSize="{tile_x}" ySize="{tile_y}" />
                    <DstRect xOff="{offset_x}" yOff="{offset_y}" xSize="{tile_x}" ySize="{tile_y}" />
                </SimpleSource>''').format(
                    path=tile_vrt_path, band=band_idx,
                    tile_x=m.tile_x, tile_y=m.tile_y,
                    offset_x=tile['start_x'] - 1, offset_y=start_y)
        band.SetMetadata(sources, 'vrt_sources')
    vrt.FlushCache()

    vrt_paths = [mosaic_vrt_path] + list(tile_vrt_paths.values())
    if use_vsi:
        dispose = partial(remove_vsis, vrt_paths)
    else:
        dispose = partial(remove_dir, out_dir)

    short_name = os.path.basename(folder)
    title = short_name
    if m.units and m.units != 'category':
        title += ' in ' + m.units
    if m.description:
        title += ' (' + m.description + ')'
    # The title is returned as VRT does not support dataset descriptions.
    return mosaic_vrt_path, title, short_name, dispose
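
# Illustrative sketch (not part of the original module): opening the returned
# mosaic VRT with GDAL and cleaning up afterwards. The folder path is
# hypothetical.
def _example_open_wps_binary(folder: str='WPS_GEOG/topo_30s') -> None:
    vrt_path, title, short_name, dispose = convert_wps_binary_to_vrt_dataset(folder)
    try:
        ds = gdal.Open(vrt_path) # type: gdal.Dataset
        print(title, ds.RasterXSize, ds.RasterYSize, ds.RasterCount)
        del ds
    finally:
        # Removes the temporary VRT files (or VSI paths if use_vsi=True).
        dispose()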
def convert_nml_to_project_domains(nml: dict) -> List[dict]:
    max_dom = nml['share']['max_dom'] # type: int
    nml = nml['geogrid']
    map_proj = nml['map_proj'] # type: str
    parent_id = nml['parent_id'] # type: List[int]
    parent_grid_ratio = nml['parent_grid_ratio'] # type: List[int]
    i_parent_start = nml['i_parent_start'] # type: List[int]
    j_parent_start = nml['j_parent_start'] # type: List[int]
    e_we = nml['e_we'] # type: List[int]
    e_sn = nml['e_sn'] # type: List[int]
    dx = [nml['dx']] # type: List[float]
    dy = [nml['dy']] # type: List[float]
    ref_lon = nml['ref_lon'] # type: float
    ref_lat = nml['ref_lat'] # type: float
    truelat1 = nml.get('truelat1') # type: Optional[float]
    truelat2 = nml.get('truelat2') # type: Optional[float]
    stand_lon = nml.get('stand_lon', 0.0) # type: float

    # Check that there are no domains with 2 nests on the same level.
    if parent_id != [1] + list(range(1, max_dom)):
        raise UserError('Due to the way domains are represented in GIS4WRF '
                        'each parent domain can have only one nested domain')

    # Check whether ref_x/ref_y is omitted, so that we can assume ref == center.
    if 'ref_x' in nml or 'ref_y' in nml:
        raise UnsupportedError('ref_x/ref_y is not supported in namelist')

    # Create CRS object from projection metadata.
    # See wps_binary_to_gdal.py for further explanations regarding latitude
    # and longitude of origin.
    if map_proj == 'lat-lon':
        if stand_lon != 0.0:
            raise UnsupportedError('Rotated lat-lon projection is not supported')
        crs = CRS.create_lonlat()
    elif map_proj == 'lambert':
        arbitrary_latitude_origin = (truelat1 + truelat2) / 2
        origin = LonLat(lon=stand_lon, lat=arbitrary_latitude_origin)
        crs = CRS.create_lambert(truelat1, truelat2, origin)
    elif map_proj == 'mercator':
        arbitrary_longitude_origin = 0
        crs = CRS.create_mercator(truelat1, arbitrary_longitude_origin)
    elif map_proj == 'polar':
        crs = CRS.create_polar(truelat1, stand_lon)
    else:
        raise UnsupportedError(f'Map projection "{map_proj}" is not supported')

    ref_xy = crs.to_xy(LonLat(lon=ref_lon, lat=ref_lat))
    ref_x = [ref_xy.x] # type: List[float]
    ref_y = [ref_xy.y] # type: List[float]
    min_x = [] # type: List[float]
    min_y = [] # type: List[float]
    padding_left = [] # type: List[int]
    padding_bottom = [] # type: List[int]
    padding_right = [] # type: List[int]
    padding_top = [] # type: List[int]
    cols = [i - 1 for i in e_we]
    rows = [i - 1 for i in e_sn]
    for idx in range(max_dom - 1):
        # Calculate horizontal grid spacing for inner domain.
        dx.append(dx[idx] / parent_grid_ratio[idx + 1])
        dy.append(dy[idx] / parent_grid_ratio[idx + 1])

        if idx == 0:
            # Calculate min coordinates for outermost domain.
            min_x.append(ref_x[idx] - (dx[idx] * (cols[idx] / 2)))
            min_y.append(ref_y[idx] - (dy[idx] * (rows[idx] / 2)))

        # Calculate min coordinates for inner domain.
        min_x.append(min_x[idx] + (dx[idx] * (i_parent_start[idx + 1] - 1)))
        min_y.append(min_y[idx] + (dy[idx] * (j_parent_start[idx + 1] - 1)))

        # Calculate center coordinates for inner domain.
        ref_x.append(min_x[idx + 1] + (dx[idx + 1] * (cols[idx + 1] / 2)))
        ref_y.append(min_y[idx + 1] + (dy[idx + 1] * (rows[idx + 1] / 2)))

        padding_left.append(i_parent_start[idx + 1] - 1)
        padding_bottom.append(j_parent_start[idx + 1] - 1)
        padding_right.append(cols[idx] - padding_left[idx] -
                             cols[idx + 1] // parent_grid_ratio[idx + 1])
        padding_top.append(rows[idx] - padding_bottom[idx] -
                           rows[idx + 1] // parent_grid_ratio[idx + 1])

    ref_lonlat = crs.to_lonlat(Coordinate2D(x=ref_x[-1], y=ref_y[-1]))
    # Note: stand_lon always has a value (default 0.0), so it is set
    # unconditionally, unlike truelat1/truelat2.
    first_domain = {
        'map_proj': map_proj,
        'cell_size': [dx[-1], dy[-1]],
        'center_lonlat': [ref_lonlat.lon, ref_lonlat.lat],
        'domain_size': [cols[-1], rows[-1]],
        'stand_lon': stand_lon,
    }
    if truelat1 is not None:
        first_domain['truelat1'] = truelat1
    if truelat2 is not None:
        first_domain['truelat2'] = truelat2
    domains = [first_domain]
    for i in range(max_dom - 1):
        domains.append({
            'parent_cell_size_ratio': parent_grid_ratio[::-1][:-1][i],
            'padding_left': padding_left[::-1][i],
            'padding_right': padding_right[::-1][i],
            'padding_bottom': padding_bottom[::-1][i],
            'padding_top': padding_top[::-1][i]
        })
    return domains
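
# Illustrative sketch (not part of the original module): a minimal two-domain
# namelist.wps represented as a dict, with values chosen arbitrarily for the
# example. The nest satisfies the usual WPS constraint that
# (e_we - 1) and (e_sn - 1) are divisible by the parent grid ratio.
def _example_convert_nml() -> List[dict]:
    nml = {
        'share': {'max_dom': 2},
        'geogrid': {
            'map_proj': 'lambert',
            'parent_id': [1, 1],
            'parent_grid_ratio': [1, 3],
            'i_parent_start': [1, 31],
            'j_parent_start': [1, 17],
            'e_we': [150, 220],
            'e_sn': [130, 214],
            'dx': 15000.0,
            'dy': 15000.0,
            'ref_lon': 14.0,
            'ref_lat': 47.0,
            'truelat1': 30.0,
            'truelat2': 60.0,
            'stand_lon': 14.0,
        },
    }
    # Returns the innermost domain first, followed by one padding entry
    # per parent level.
    return convert_nml_to_project_domains(nml)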
def convert_to_wps_binary(input_path: str, output_folder: str, is_categorical: bool,
                          units: Optional[str]=None, description: Optional[str]=None,
                          strict_datum: bool=True) -> GeogridBinaryDataset:
    '''Losslessly convert common geo formats to WPS binary format.
    If the given input file has a CRS or data type unsupported by WRF then an error is raised.

    :param input_path: Path to GDAL-supported raster file.
    :param output_folder: Path to output folder, will be created if not existing
    :param is_categorical: Whether the data is categorical, otherwise continuous
    :param units: units for continuous data
    :param description: single-line dataset description
    :param strict_datum: if True, fail if the input datum is not supported by WRF,
                         otherwise ignore mismatch
    '''
    os.makedirs(output_folder, exist_ok=True)
    if os.listdir(output_folder):
        raise ValueError('Output folder must be empty')

    # FIXME if there is no nodata value, ask the user if it really has no nodata
    #       or ask for the value
    src_ds = gdal.Open(input_path) # type: gdal.Dataset

    xsize, ysize = src_ds.RasterXSize, src_ds.RasterYSize
    if xsize > MAX_SIZE or ysize > MAX_SIZE:
        raise UserError(f'Dataset has more than {MAX_SIZE} rows or columns: '
                        f'{ysize} x {xsize}, consider downsampling')
    filename_digits = 6 if xsize > 99999 or ysize > 99999 else 5

    if src_ds.GetLayerCount() > 1:
        raise UnsupportedError('Dataset has more than one layer which is unsupported')

    band = src_ds.GetRasterBand(1) # type: gdal.Band
    src_no_data_value = band.GetNoDataValue()
    has_no_data_value = src_no_data_value is not None

    tilesize_x = find_tile_size(xsize, try_hard=not has_no_data_value)
    tilesize_y = find_tile_size(ysize, try_hard=not has_no_data_value)
    is_perfect_tiling = xsize % tilesize_x == 0 and ysize % tilesize_y == 0

    if is_categorical or (tilesize_x == xsize and tilesize_y == ysize):
        tile_bdr = 0
    else:
        # TODO write unit test that checks whether halo areas have correct values
        tile_bdr = 3

    if tile_bdr > 0 and not has_no_data_value:
        raise UserError('No-data value required as dataset is continuous and halo is non-zero')

    if not is_perfect_tiling and not has_no_data_value:
        raise UserError('No-data value required as no perfect tile size could be found')

    tilesize_bdr_x = tilesize_x + 2*tile_bdr
    tilesize_bdr_y = tilesize_y + 2*tile_bdr

    tiles_x = list(range(0, xsize, tilesize_x))
    tiles_y = list(range(0, ysize, tilesize_y))

    ysize_pad = tilesize_y * len(tiles_y) # ysize including padding caused by imperfect tiling

    # write 'index' file with metadata
    index_path = os.path.join(output_folder, 'index')
    index_dict, datum_mismatch, inv_scale_factor, dst_dtype, dst_no_data_value = create_index_dict(
        src_ds, tilesize_x, tilesize_y, ysize_pad, tile_bdr, filename_digits,
        is_categorical, units, description, strict_datum)
    write_index_file(index_path, index_dict)

    np_dst_dtype = gdal_array.GDALTypeCodeToNumericTypeCode(dst_dtype)

    needs_scaling = inv_scale_factor is not None

    # As we have no control over the auxiliary files that are created as well
    # during conversion we do everything in a temporary folder and move the
    # binary file out after the conversion. This keeps everything clean and tidy.
    tmp_dir = tempfile.mkdtemp()
    tmp_bin_path = os.path.join(tmp_dir, 'data.bin')

    driver = gdal.GetDriverByName('ENVI') # type: gdal.Driver

    dy = src_ds.GetGeoTransform()[5]

    try:
        for start_x in tiles_x:
            for start_y in tiles_y:
                end_x = start_x + tilesize_x - 1
                end_y = start_y + tilesize_y - 1
                start_bdr_x = start_x - tile_bdr
                start_bdr_y = start_y - tile_bdr
                end_bdr_x = end_x + tile_bdr
                end_bdr_y = end_y + tile_bdr

                # read source data
                offset_x = max(0, start_bdr_x)
                offset_y = max(0, start_bdr_y)
                if end_bdr_x >= xsize:
                    datasize_x = xsize - offset_x
                else:
                    datasize_x = end_bdr_x - offset_x + 1
                if end_bdr_y >= ysize:
                    datasize_y = ysize - offset_y
                else:
                    datasize_y = end_bdr_y - offset_y + 1

                src_data = band.ReadAsArray(offset_x, offset_y, datasize_x, datasize_y)
                if dy > 0:
                    src_data = src_data[::-1]

                # scale if necessary (float data only)
                if needs_scaling:
                    # TODO test if scaling with no-data works
                    if has_no_data_value:
                        src_data = ma.masked_equal(src_data, src_no_data_value)
                    src_data *= inv_scale_factor
                    np.round(src_data, out=src_data)
                    if has_no_data_value:
                        src_data = ma.filled(src_data, dst_no_data_value)

                # pad incomplete tile with nodata value
                if datasize_x == tilesize_bdr_x and datasize_y == tilesize_bdr_y:
                    dst_data = src_data
                else:
                    assert has_no_data_value
                    dst_data = np.empty((tilesize_bdr_y, tilesize_bdr_x), np_dst_dtype)
                    data_start_x = offset_x - start_bdr_x
                    data_start_y = offset_y - start_bdr_y
                    dst_data[data_start_y:data_start_y+datasize_y,
                             data_start_x:data_start_x+datasize_x] = src_data
                    if start_bdr_x < 0:
                        dst_data[:,:data_start_x] = dst_no_data_value
                    if start_bdr_y < 0:
                        dst_data[:data_start_y,:] = dst_no_data_value
                    if end_bdr_x >= xsize:
                        dst_data[:,data_start_x+datasize_x:] = dst_no_data_value
                    if end_bdr_y >= ysize:
                        dst_data[data_start_y+datasize_y:,:] = dst_no_data_value

                # create tile file
                dst_ds = driver.Create(tmp_bin_path, tilesize_bdr_x, tilesize_bdr_y,
                                       1, dst_dtype) # type: gdal.Dataset
                dst_band = dst_ds.GetRasterBand(1) # type: gdal.Band
                dst_band.WriteArray(dst_data)

                # write to disk
                dst_ds.FlushCache()
                del dst_ds

                # move to final location with WPS-specific filename convention
                fmt_int = '{:0' + str(filename_digits) + 'd}'
                fmt_filename = '{fmt}-{fmt}.{fmt}-{fmt}'.format(fmt=fmt_int)
                if dy < 0:
                    end_y = ysize_pad - start_y - 1
                    start_y = end_y - tilesize_y + 1
                final_path = os.path.join(output_folder, fmt_filename.format(
                    start_x + 1, end_x + 1, start_y + 1, end_y + 1))
                shutil.move(tmp_bin_path, final_path)
        return GeogridBinaryDataset(index_path, datum_mismatch)
    finally:
        shutil.rmtree(tmp_dir)
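
# Illustrative sketch (not part of the original module): converting a
# continuous GeoTIFF to WPS binary format. The paths are hypothetical, and
# the attribute names assume GeogridBinaryDataset exposes the two values it
# is constructed with above (index_path and datum_mismatch).
def _example_convert(input_path: str='elevation.tif',
                     output_folder: str='out/elevation') -> None:
    dataset = convert_to_wps_binary(
        input_path, output_folder, is_categorical=False,
        units='m', description='Terrain height', strict_datum=True)
    if dataset.datum_mismatch:
        print('Datum mismatch:', dataset.datum_mismatch)
    print('Index file written to', dataset.index_path)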
def create_index_dict(dataset: gdal.Dataset, tilesize_x: int, tilesize_y: int,
                      ysize_pad: int, tile_bdr: int, filename_digits: int,
                      is_categorical: bool, units: Optional[str]=None,
                      description: Optional[str]=None, strict_datum: bool=True
                      ) -> Tuple[Dict[str, Any], DatumMismatch, Optional[float], int, Optional[float]]:
    '''Returns a dictionary that can be used for writing a WPS Binary format index file.
    If the given dataset has a CRS or data type unsupported by WRF then an error is raised.

    See also :func:`write_index_file`.
    '''
    band = dataset.GetRasterBand(1) # type: gdal.Band

    dtype = band.DataType
    if dtype in DTYPE_INT:
        no_data_value = band.GetNoDataValue() # type: Optional[float]
        scale_factor = band.GetScale()
        inv_scale_factor = None
        if band.GetOffset() != 0:
            raise UnsupportedError('Integer data with offset not supported')
    elif dtype in DTYPE_FLOAT:
        if is_categorical:
            raise UserError('Categorical data must have integer-type data but is float')
        assert band.GetOffset() == 0
        assert band.GetScale() == 1
        # WPS binary doesn't support floating point data.
        # Floating point data must be converted to integers by scaling and rounding.
        inv_scale_factor, min_max = compute_inv_scale_factor(read_blocks(band))
        scale_factor = 1/inv_scale_factor
        min_, max_ = min_max
        min_scaled = round(min_ * inv_scale_factor)
        max_scaled = round(max_ * inv_scale_factor)
        dtype = get_optimal_dtype(min_scaled, max_scaled)
        if band.GetNoDataValue() is None:
            no_data_value = None
        else:
            # TODO may fail if value range equals dtype range;
            #      adjusting the scaling factor slightly to make room
            #      for a no-data value may help
            no_data_value = get_no_data_value(dtype, min_scaled, max_scaled)
    else:
        assert False, 'Unsupported data type: {}'.format(gdal.GetDataTypeName(dtype))

    signed = gdal_dtype_is_signed(dtype)
    wordsize = gdal.GetDataTypeSize(dtype) // 8

    wkt = dataset.GetProjection()
    srs = osr.SpatialReference(wkt)

    truelat1 = truelat2 = stand_lon = None

    geotransform = dataset.GetGeoTransform()
    dx = geotransform[1]
    dy = geotransform[5]
    assert dx > 0 # dy can be negative, see below

    projection = None
    datum_mismatch = None
    if srs.IsGeographic():
        if srs.EPSGTreatsAsLatLong():
            raise UnsupportedError('Unsupported axis order: Lat/Lon, must be Lon/Lat')
        if not CRS.is_wrf_sphere_datum(srs):
            datum_mismatch = DatumMismatch(
                expected='WRF Sphere (6370km)',
                actual='a={}m b={}m'.format(srs.GetSemiMajor(), srs.GetSemiMinor()))
        if datum_mismatch and strict_datum:
            raise UnsupportedError(
                'Unsupported datum, must be based on a sphere with '
                'radius {}m, but is an ellipsoid with a={}m b={}m'.format(
                    WRF_EARTH_RADIUS, srs.GetSemiMajor(), srs.GetSemiMinor()))
        projection = 'regular_ll'
    elif srs.IsProjected():
        proj = srs.GetAttrValue('projection')
        datum = srs.GetAttrValue('datum')
        if proj in ['Albers_Conic_Equal_Area', 'Lambert_Conformal_Conic_2SP', 'Mercator_2SP']:
            truelat1 = srs.GetNormProjParm('standard_parallel_1')
        if proj == 'Polar_Stereographic':
            truelat1 = srs.GetNormProjParm('latitude_of_origin')
        if proj in ['Albers_Conic_Equal_Area', 'Lambert_Conformal_Conic_2SP']:
            truelat2 = srs.GetNormProjParm('standard_parallel_2')
        if proj == 'Albers_Conic_Equal_Area':
            stand_lon = srs.GetNormProjParm('longitude_of_center')
        if proj in ['Lambert_Conformal_Conic_2SP', 'Mercator_2SP', 'Polar_Stereographic']:
            stand_lon = srs.GetNormProjParm('central_meridian')

        if proj == 'Albers_Conic_Equal_Area':
            if datum != 'North_American_Datum_1983':
                datum_mismatch = DatumMismatch(expected='NAD83', actual=datum)
            projection = 'albers_nad83'
        elif proj == 'Lambert_Conformal_Conic_2SP':
            if not CRS.is_wrf_sphere_datum(srs):
                datum_mismatch = DatumMismatch(expected='WRF Sphere (6370km)', actual=datum)
            projection = 'lambert'
        elif proj == 'Mercator_2SP':
            if not CRS.is_wrf_sphere_datum(srs):
                datum_mismatch = DatumMismatch(expected='WRF Sphere (6370km)', actual=datum)
            projection = 'mercator'
        # For polar stereographic we don't allow datum mismatch in non-strict mode
        # as it would be ambiguous which WPS projection ID to choose.
        elif proj == 'Polar_Stereographic' and datum == 'WGS_1984':
            projection = 'polar_wgs84'
        elif proj == 'Polar_Stereographic' and CRS.is_wrf_sphere_datum(srs):
            projection = 'polar'

        if projection is None or (datum_mismatch and strict_datum):
            raise UnsupportedError('Unsupported projection/datum: {}; {}'.format(proj, datum))
    else:
        raise UnsupportedError('Unsupported SRS type, must be geographic or projected')

    if units is None and is_categorical:
        units = 'category'

    # GDAL always uses the system byte order when creating ENVI files.
    is_little_endian = sys.byteorder == 'little'

    # WPS does not support the concept of negative dy and requires that
    # the highest y coordinate corresponds to the highest y index.
    # If row_order=top_bottom (which we use), then the highest y index corresponds to
    # the row that is stored first in the file.
    # If row_order=bottom_top, then the highest y index corresponds to
    # the row that is stored last in the file.
    # Index coordinates in WPS do not start from 0 but from 1 where (1,1)
    # corresponds to the center of the cell. GDAL (0,0) corresponds to the corner of the cell.
    # See also http://www2.mmm.ucar.edu/wrf/users/FAQ_files/FAQ_wps_intermediate_format.html.
    half_cell = 0.5
    # WPS index coordinates
    known_x = known_y = 1.0
    # GDAL index coordinates
    x_idx = known_x - half_cell
    if dy < 0:
        y_idx = ysize_pad - known_y + half_cell
    else:
        y_idx = known_y - half_cell
    known_lonlat = CRS(srs=srs).to_lonlat(get_crs_coordinates(dataset, x_idx, y_idx))

    metadata = {
        'type': 'categorical' if is_categorical else 'continuous',
        'endian': 'little' if is_little_endian else 'big',
        'signed': 'yes' if signed else 'no',
        'wordsize': wordsize,
        'row_order': 'top_bottom',
        'projection': projection,
        'dx': dx,
        'dy': abs(dy),
        'known_x': known_x,
        'known_y': known_y,
        'known_lat': known_lonlat.lat,
        'known_lon': known_lonlat.lon,
        'tile_x': tilesize_x,
        'tile_y': tilesize_y,
        'tile_z': 1,
        'tile_bdr': tile_bdr
    }
    if filename_digits > 5:
        metadata['filename_digits'] = filename_digits
    if scale_factor != 1:
        metadata['scale_factor'] = scale_factor
    if no_data_value is not None:
        metadata['missing_value'] = float(no_data_value)
    if is_categorical:
        # Note that ComputeRasterMinMax ignores pixels with no-data value.
        band_min, band_max = band.ComputeRasterMinMax()
        assert band_min == int(band_min)
        assert band_max == int(band_max)
        metadata['category_min'] = int(band_min)
        metadata['category_max'] = int(band_max)
    if truelat1 is not None:
        metadata['truelat1'] = truelat1
    if truelat2 is not None:
        metadata['truelat2'] = truelat2
    if stand_lon is not None:
        metadata['stdlon'] = stand_lon
    if units is not None:
        metadata['units'] = units
    if description is not None:
        metadata['description'] = description

    return metadata, datum_mismatch, inv_scale_factor, dtype, no_data_value