def calculate_reflectance(
    acquisition,
    interpolation_group,
    satellite_solar_group,
    slope_aspect_group,
    relative_slope_group,
    incident_angles_group,
    exiting_angles_group,
    shadow_masks_group,
    ancillary_group,
    rori,
    out_group=None,
    compression=H5CompressionFilter.LZF,
    filter_opts=None,
    normalized_solar_zenith=45.0,
    esun=None,
):
    """
    Calculates Lambertian, BRDF corrected and BRDF + terrain
    illumination corrected surface reflectance.

    :param acquisition:
        An instance of an acquisition object.

    :param interpolation_group:
        The root HDF5 `Group` that contains the interpolated
        atmospheric coefficients. The dataset pathnames are given by:

        * DatasetName.INTERPOLATION_FMT

    :param satellite_solar_group:
        The root HDF5 `Group` that contains the solar zenith, solar
        azimuth, satellite view, satellite azimuth and relative
        azimuth datasets specified by the pathnames given by:

        * DatasetName.SOLAR_ZENITH
        * DatasetName.SOLAR_AZIMUTH
        * DatasetName.SATELLITE_VIEW
        * DatasetName.SATELLITE_AZIMUTH
        * DatasetName.RELATIVE_AZIMUTH

    :param slope_aspect_group:
        The root HDF5 `Group` that contains the slope and aspect
        datasets specified by the pathnames given by:

        * DatasetName.SLOPE
        * DatasetName.ASPECT

    :param relative_slope_group:
        The root HDF5 `Group` that contains the relative slope dataset
        specified by the pathname given by:

        * DatasetName.RELATIVE_SLOPE

    :param incident_angles_group:
        The root HDF5 `Group` that contains the incident angle dataset
        specified by the pathname given by:

        * DatasetName.INCIDENT

    :param exiting_angles_group:
        The root HDF5 `Group` that contains the exiting angle dataset
        specified by the pathname given by:

        * DatasetName.EXITING

    :param shadow_masks_group:
        The root HDF5 `Group` that contains the combined shadow masks
        (self shadow, cast shadow (solar), cast shadow (satellite))
        dataset specified by the pathname given by:

        * DatasetName.COMBINED_SHADOW

    :param ancillary_group:
        The root HDF5 `Group` that contains the Isotropic (iso),
        RossThick (vol), and LiSparseR (geo) BRDF scalar parameters.
        The dataset pathnames are given by:

        * DatasetName.BRDF_FMT

    :param rori:
        Threshold for terrain correction. Fuqin to document.

    :param out_group:
        If set to None (default) then the results will be returned
        as an in-memory hdf5 file, i.e. the `core` driver. Otherwise,
        a writeable HDF5 `Group` object.

        The dataset names will be given by the format string detailed
        by:

        * DatasetName.REFLECTANCE_FMT

        The reflectance products are:

        * lambertian
        * nbar (BRDF corrected reflectance)
        * nbart (BRDF + terrain illumination corrected reflectance)

    :param compression:
        The compression filter to use.
        Default is H5CompressionFilter.LZF

    :param filter_opts:
        A dict of key value pairs available to the given configuration
        instance of H5CompressionFilter. For example
        H5CompressionFilter.LZF has the keywords *chunks* and *shuffle*
        available.
        Default is None, which will use the default settings for the
        chosen H5CompressionFilter instance.

    :param normalized_solar_zenith:
        A float value type to normalize reflectance to a particular
        angle.

    :param esun:
        A float value type. The solar irradiance normal to the
        atmosphere, in units of W/sq cm/sr/nm.

    :return:
        An opened `h5py.File` object, that is either in-memory using
        the `core` driver, or on disk.
""" geobox = acquisition.gridded_geo_box() bn = acquisition.band_name dname_fmt = DatasetName.INTERPOLATION_FMT.value fv_dataset = interpolation_group[dname_fmt.format(coefficient=AC.FV.value, band_name=bn)] fs_dataset = interpolation_group[dname_fmt.format(coefficient=AC.FS.value, band_name=bn)] b_dataset = interpolation_group[dname_fmt.format(coefficient=AC.B.value, band_name=bn)] s_dataset = interpolation_group[dname_fmt.format(coefficient=AC.S.value, band_name=bn)] a_dataset = interpolation_group[dname_fmt.format(coefficient=AC.A.value, band_name=bn)] dir_dataset = interpolation_group[dname_fmt.format( coefficient=AC.DIR.value, band_name=bn)] dif_dataset = interpolation_group[dname_fmt.format( coefficient=AC.DIF.value, band_name=bn)] ts_dataset = interpolation_group[dname_fmt.format(coefficient=AC.TS.value, band_name=bn)] solar_zenith_dset = satellite_solar_group[DatasetName.SOLAR_ZENITH.value] solar_azimuth_dset = satellite_solar_group[DatasetName.SOLAR_AZIMUTH.value] satellite_v_dset = satellite_solar_group[DatasetName.SATELLITE_VIEW.value] relative_a_dset = satellite_solar_group[DatasetName.RELATIVE_AZIMUTH.value] slope_dataset = slope_aspect_group[DatasetName.SLOPE.value] aspect_dataset = slope_aspect_group[DatasetName.ASPECT.value] relative_s_dset = relative_slope_group[DatasetName.RELATIVE_SLOPE.value] incident_angle_dataset = incident_angles_group[DatasetName.INCIDENT.value] exiting_angle_dataset = exiting_angles_group[DatasetName.EXITING.value] shadow_dataset = shadow_masks_group[DatasetName.COMBINED_SHADOW.value] dname_fmt = DatasetName.BRDF_FMT.value dname = dname_fmt.format(band_name=bn, parameter=BrdfDirectionalParameters.ALPHA_1.value) brdf_alpha1 = ancillary_group[dname][()] dname = dname_fmt.format(band_name=bn, parameter=BrdfDirectionalParameters.ALPHA_2.value) brdf_alpha2 = ancillary_group[dname][()] # Initialise the output file if out_group is None: fid = h5py.File("surface-reflectance.h5", "w", driver="core", backing_store=False) else: fid = out_group if GroupName.STANDARD_GROUP.value not in fid: fid.create_group(GroupName.STANDARD_GROUP.value) if filter_opts is None: filter_opts = {} else: filter_opts = filter_opts.copy() filter_opts["chunks"] = acquisition.tile_size kwargs = compression.config(**filter_opts).dataset_compression_kwargs() grp = fid[GroupName.STANDARD_GROUP.value] kwargs["shape"] = (acquisition.lines, acquisition.samples) kwargs["fillvalue"] = NO_DATA_VALUE kwargs["dtype"] = "int16" # create the datasets dname_fmt = DatasetName.REFLECTANCE_FMT.value dname = dname_fmt.format(product=AP.LAMBERTIAN.value, band_name=bn) lmbrt_dset = grp.create_dataset(dname, **kwargs) dname = dname_fmt.format(product=AP.NBAR.value, band_name=bn) nbar_dset = grp.create_dataset(dname, **kwargs) dname = dname_fmt.format(product=AP.NBART.value, band_name=bn) nbart_dset = grp.create_dataset(dname, **kwargs) # attach some attributes to the image datasets attrs = { "crs_wkt": geobox.crs.ExportToWkt(), "geotransform": geobox.transform.to_gdal(), "no_data_value": kwargs["fillvalue"], "rori_threshold_setting": rori, "platform_id": acquisition.platform_id, "sensor_id": acquisition.sensor_id, "band_id": acquisition.band_id, "band_name": bn, "alias": acquisition.alias, } desc = "Contains the lambertian reflectance data scaled by 10000." attrs["description"] = desc attach_image_attributes(lmbrt_dset, attrs) desc = "Contains the brdf corrected reflectance data scaled by 10000." 
attrs["description"] = desc attach_image_attributes(nbar_dset, attrs) desc = "Contains the brdf and terrain corrected reflectance data scaled " "by 10000." attrs["description"] = desc attach_image_attributes(nbart_dset, attrs) # process by tile for tile in acquisition.tiles(): # tile indices idx = (slice(tile[0][0], tile[0][1]), slice(tile[1][0], tile[1][1])) # define some static arguments acq_args = {"window": tile, "out_no_data": NO_DATA_VALUE, "esun": esun} f32_args = {"dtype": numpy.float32, "transpose": True} # Read the data corresponding to the current tile for all dataset # Convert the datatype if required and transpose band_data = as_array(acquisition.radiance_data(**acq_args), **f32_args) shadow = as_array(shadow_dataset[idx], numpy.int8, transpose=True) solar_zenith = as_array(solar_zenith_dset[idx], **f32_args) solar_azimuth = as_array(solar_azimuth_dset[idx], **f32_args) satellite_view = as_array(satellite_v_dset[idx], **f32_args) relative_angle = as_array(relative_a_dset[idx], **f32_args) slope = as_array(slope_dataset[idx], **f32_args) aspect = as_array(aspect_dataset[idx], **f32_args) incident_angle = as_array(incident_angle_dataset[idx], **f32_args) exiting_angle = as_array(exiting_angle_dataset[idx], **f32_args) relative_slope = as_array(relative_s_dset[idx], **f32_args) a_mod = as_array(a_dataset[idx], **f32_args) b_mod = as_array(b_dataset[idx], **f32_args) s_mod = as_array(s_dataset[idx], **f32_args) fs = as_array(fs_dataset[idx], **f32_args) fv = as_array(fv_dataset[idx], **f32_args) ts = as_array(ts_dataset[idx], **f32_args) direct = as_array(dir_dataset[idx], **f32_args) diffuse = as_array(dif_dataset[idx], **f32_args) # Allocate the output arrays xsize, ysize = band_data.shape # band_data has been transposed ref_lm = numpy.zeros((ysize, xsize), dtype="int16") ref_brdf = numpy.zeros((ysize, xsize), dtype="int16") ref_terrain = numpy.zeros((ysize, xsize), dtype="int16") # Allocate the work arrays (single row of data) ref_lm_work = numpy.zeros(xsize, dtype="float32") ref_brdf_work = numpy.zeros(xsize, dtype="float32") ref_terrain_work = numpy.zeros(xsize, dtype="float32") # Run terrain correction reflectance( xsize, ysize, rori, brdf_alpha1, brdf_alpha2, acquisition.reflectance_adjustment, kwargs["fillvalue"], band_data, shadow, solar_zenith, solar_azimuth, satellite_view, relative_angle, slope, aspect, incident_angle, exiting_angle, relative_slope, a_mod, b_mod, s_mod, fs, fv, ts, direct, diffuse, ref_lm_work, ref_brdf_work, ref_terrain_work, ref_lm.transpose(), ref_brdf.transpose(), ref_terrain.transpose(), normalized_solar_zenith, ) # Write the current tile to disk lmbrt_dset[idx] = ref_lm nbar_dset[idx] = ref_brdf nbart_dset[idx] = ref_terrain # close any still opened files, arrays etc associated with the acquisition acquisition.close() if out_group is None: return fid
def slope_aspect_arrays(
    acquisition,
    dsm_group,
    buffer_distance,
    out_group=None,
    compression=H5CompressionFilter.LZF,
    filter_opts=None,
):
    """
    Calculates slope and aspect.

    :param acquisition:
        An instance of an acquisition object.

    :param dsm_group:
        The root HDF5 `Group` that contains the Digital Surface Model
        data. The dataset pathname is given by:

        * DatasetName.DSM_SMOOTHED

        The dataset must have the same dimensions as `acquisition`
        plus a margin of widths specified by `buffer_distance`.

    :param buffer_distance:
        A number representing the desired distance (in the same units
        as the acquisition) in which to calculate the extra number of
        pixels required to buffer an image.
        Default is 8000.

    :param out_group:
        If set to None (default) then the results will be returned
        as an in-memory hdf5 file, i.e. the `core` driver. Otherwise,
        a writeable HDF5 `Group` object.

        The dataset names will be as follows:

        * DatasetName.SLOPE
        * DatasetName.ASPECT

    :param compression:
        The compression filter to use.
        Default is H5CompressionFilter.LZF

    :param filter_opts:
        A dict of key value pairs available to the given configuration
        instance of H5CompressionFilter. For example
        H5CompressionFilter.LZF has the keywords *chunks* and *shuffle*
        available.
        Default is None, which will use the default settings for the
        chosen H5CompressionFilter instance.

    :return:
        An opened `h5py.File` object, that is either in-memory using
        the `core` driver, or on disk.
    """
    # Setup the geobox
    geobox = acquisition.gridded_geo_box()

    # Retrieve the spheroid parameters
    # (used in calculating pixel size in metres per lat/lon)
    spheroid, _ = setup_spheroid(geobox.crs.ExportToWkt())

    # Are we in projected or geographic space
    is_utm = not geobox.crs.IsGeographic()

    # Define Top, Bottom, Left, Right pixel margins
    margins = pixel_buffer(acquisition, buffer_distance)

    # Get the x and y pixel sizes
    _, y_origin = geobox.origin
    x_res, y_res = geobox.pixelsize

    # Get acquisition dimensions and add 1 pixel top, bottom, left & right
    cols, rows = geobox.get_shape_xy()
    ncol = cols + 2
    nrow = rows + 2

    # elevation dataset
    elevation = dsm_group[DatasetName.DSM_SMOOTHED.value]
    ele_rows, ele_cols = elevation.shape

    # TODO: check that the index is correct
    # Define the index to read the DEM subset
    ystart, ystop = (margins.top - 1, ele_rows - (margins.bottom - 1))
    xstart, xstop = (margins.left - 1, ele_cols - (margins.right - 1))
    idx = (slice(ystart, ystop), slice(xstart, xstop))

    subset = as_array(elevation[idx], dtype=numpy.float32, transpose=True)

    # Define an array of latitudes
    # This will be ignored if is_utm == True
    alat = numpy.array(
        [y_origin - i * y_res for i in range(-1, nrow - 1)],
        dtype=numpy.float64,
    )  # yes, I did mean float64.

    # Output the reprojected result
    # Initialise the output files
    if out_group is None:
        fid = h5py.File(
            'slope-aspect.h5', 'w', driver='core', backing_store=False
        )
    else:
        fid = out_group

    if GroupName.SLP_ASP_GROUP.value not in fid:
        fid.create_group(GroupName.SLP_ASP_GROUP.value)

    if filter_opts is None:
        filter_opts = {}
    else:
        filter_opts = filter_opts.copy()
    filter_opts['chunks'] = acquisition.tile_size

    group = fid[GroupName.SLP_ASP_GROUP.value]

    # metadata for calculation
    param_group = group.create_group('PARAMETERS')
    param_group.attrs['dsm_index'] = ((ystart, ystop), (xstart, xstop))
    param_group.attrs['pixel_buffer'] = '1 pixel'

    kwargs = compression.config(**filter_opts).dataset_compression_kwargs()
    no_data = -999
    kwargs['fillvalue'] = no_data

    # Define the output arrays. These will be transposed upon input
    slope = numpy.zeros((rows, cols), dtype='float32')
    aspect = numpy.zeros((rows, cols), dtype='float32')

    slope_aspect(
        ncol,
        nrow,
        cols,
        rows,
        x_res,
        y_res,
        spheroid,
        alat,
        is_utm,
        subset,
        slope.transpose(),
        aspect.transpose(),
    )

    # output datasets
    dname = DatasetName.SLOPE.value
    slope_dset = group.create_dataset(dname, data=slope, **kwargs)
    dname = DatasetName.ASPECT.value
    aspect_dset = group.create_dataset(dname, data=aspect, **kwargs)

    # attach some attributes to the image datasets
    attrs = {
        'crs_wkt': geobox.crs.ExportToWkt(),
        'geotransform': geobox.transform.to_gdal(),
        'no_data_value': no_data,
    }
    desc = "The slope derived from the input elevation model."
    attrs['description'] = desc
    attach_image_attributes(slope_dset, attrs)

    desc = "The aspect derived from the input elevation model."
    attrs['description'] = desc
    attach_image_attributes(aspect_dset, attrs)

    if out_group is None:
        return fid
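
# ---------------------------------------------------------------------------
# Illustrative usage sketch for slope_aspect_arrays (comment only, not
# executed on import). The DSM file name and its internal group layout are
# hypothetical; `acq` stands for an already-loaded acquisition. With
# out_group left as None the result is an in-memory `core` file that can be
# chained straight into incident_angles/exiting_angles below.
#
#     with h5py.File("dsm.h5", "r") as dsm_fid:
#         slp_asp_fid = slope_aspect_arrays(acq, dsm_fid["ELEVATION"], 8000)
#     slp_asp_grp = slp_asp_fid[GroupName.SLP_ASP_GROUP.value]
#     slope = slp_asp_grp[DatasetName.SLOPE.value][:]
# ---------------------------------------------------------------------------
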
def incident_angles(
    satellite_solar_group,
    slope_aspect_group,
    out_group=None,
    compression=H5CompressionFilter.LZF,
    filter_opts=None,
):
    """
    Calculates the incident angle and the azimuthal incident angle.

    :param satellite_solar_group:
        The root HDF5 `Group` that contains the solar zenith and
        solar azimuth datasets specified by the pathnames given by:

        * DatasetName.SOLAR_ZENITH
        * DatasetName.SOLAR_AZIMUTH

    :param slope_aspect_group:
        The root HDF5 `Group` that contains the slope and aspect
        datasets specified by the pathnames given by:

        * DatasetName.SLOPE
        * DatasetName.ASPECT

    :param out_group:
        If set to None (default) then the results will be returned
        as an in-memory hdf5 file, i.e. the `core` driver. Otherwise,
        a writeable HDF5 `Group` object.

        The dataset names will be as follows:

        * DatasetName.INCIDENT
        * DatasetName.AZIMUTHAL_INCIDENT

    :param compression:
        The compression filter to use.
        Default is H5CompressionFilter.LZF

    :param filter_opts:
        A dict of key value pairs available to the given configuration
        instance of H5CompressionFilter. For example
        H5CompressionFilter.LZF has the keywords *chunks* and *shuffle*
        available.
        Default is None, which will use the default settings for the
        chosen H5CompressionFilter instance.

    :return:
        An opened `h5py.File` object, that is either in-memory using
        the `core` driver, or on disk.
    """
    # dataset arrays
    dname = DatasetName.SOLAR_ZENITH.value
    solar_zenith_dataset = satellite_solar_group[dname]
    dname = DatasetName.SOLAR_AZIMUTH.value
    solar_azimuth_dataset = satellite_solar_group[dname]
    slope_dataset = slope_aspect_group[DatasetName.SLOPE.value]
    aspect_dataset = slope_aspect_group[DatasetName.ASPECT.value]

    geobox = GriddedGeoBox.from_dataset(solar_zenith_dataset)
    shape = geobox.get_shape_yx()
    rows, cols = shape
    crs = geobox.crs.ExportToWkt()

    # Initialise the output files
    if out_group is None:
        fid = h5py.File(
            "incident-angles.h5", "w", driver="core", backing_store=False
        )
    else:
        fid = out_group

    if GroupName.INCIDENT_GROUP.value not in fid:
        fid.create_group(GroupName.INCIDENT_GROUP.value)

    if filter_opts is None:
        filter_opts = {}

    grp = fid[GroupName.INCIDENT_GROUP.value]
    tile_size = solar_zenith_dataset.chunks
    filter_opts["chunks"] = tile_size
    kwargs = compression.config(**filter_opts).dataset_compression_kwargs()
    no_data = numpy.nan
    kwargs["shape"] = shape
    kwargs["fillvalue"] = no_data
    kwargs["dtype"] = "float32"

    # output datasets
    dataset_name = DatasetName.INCIDENT.value
    incident_dset = grp.create_dataset(dataset_name, **kwargs)
    dataset_name = DatasetName.AZIMUTHAL_INCIDENT.value
    azi_inc_dset = grp.create_dataset(dataset_name, **kwargs)

    # attach some attributes to the image datasets
    attrs = {
        "crs_wkt": crs,
        "geotransform": geobox.transform.to_gdal(),
        "no_data_value": no_data,
    }
    desc = "Contains the incident angles in degrees."
    attrs["description"] = desc
    attrs["alias"] = "incident"
    attach_image_attributes(incident_dset, attrs)

    desc = "Contains the azimuthal incident angles in degrees."
attrs["description"] = desc attrs["alias"] = "azimuthal-incident" attach_image_attributes(azi_inc_dset, attrs) # process by tile for tile in generate_tiles(cols, rows, tile_size[1], tile_size[0]): # Row and column start and end locations ystart = tile[0][0] xstart = tile[1][0] yend = tile[0][1] xend = tile[1][1] idx = (slice(ystart, yend), slice(xstart, xend)) # Tile size ysize = yend - ystart xsize = xend - xstart # Read the data for the current tile # Convert to required datatype and transpose sol_zen = as_array(solar_zenith_dataset[idx], dtype=numpy.float32, transpose=True) sol_azi = as_array(solar_azimuth_dataset[idx], dtype=numpy.float32, transpose=True) slope = as_array(slope_dataset[idx], dtype=numpy.float32, transpose=True) aspect = as_array(aspect_dataset[idx], dtype=numpy.float32, transpose=True) # Initialise the work arrays incident = numpy.zeros((ysize, xsize), dtype="float32") azi_incident = numpy.zeros((ysize, xsize), dtype="float32") # Process the current tile incident_angle( xsize, ysize, sol_zen, sol_azi, slope, aspect, incident.transpose(), azi_incident.transpose(), ) # Write the current tile to disk incident_dset[idx] = incident azi_inc_dset[idx] = azi_incident if out_group is None: return fid
def exiting_angles(
    satellite_solar_group,
    slope_aspect_group,
    out_group=None,
    compression=H5CompressionFilter.LZF,
    filter_opts=None,
):
    """
    Calculates the exiting angle and the azimuthal exiting angle.

    :param satellite_solar_group:
        The root HDF5 `Group` that contains the satellite view and
        satellite azimuth datasets specified by the pathnames given by:

        * DatasetName.SATELLITE_VIEW
        * DatasetName.SATELLITE_AZIMUTH

    :param slope_aspect_group:
        The root HDF5 `Group` that contains the slope and aspect
        datasets specified by the pathnames given by:

        * DatasetName.SLOPE
        * DatasetName.ASPECT

    :param out_group:
        If set to None (default) then the results will be returned
        as an in-memory hdf5 file, i.e. the `core` driver. Otherwise,
        a writeable HDF5 `Group` object.

        The dataset names will be as follows:

        * DatasetName.EXITING
        * DatasetName.AZIMUTHAL_EXITING

    :param compression:
        The compression filter to use.
        Default is H5CompressionFilter.LZF

    :param filter_opts:
        A dict of key value pairs available to the given configuration
        instance of H5CompressionFilter. For example
        H5CompressionFilter.LZF has the keywords *chunks* and *shuffle*
        available.
        Default is None, which will use the default settings for the
        chosen H5CompressionFilter instance.

    :return:
        An opened `h5py.File` object, that is either in-memory using
        the `core` driver, or on disk.
    """
    # dataset arrays
    dname = DatasetName.SATELLITE_VIEW.value
    satellite_view_dataset = satellite_solar_group[dname]
    dname = DatasetName.SATELLITE_AZIMUTH.value
    satellite_azimuth_dataset = satellite_solar_group[dname]
    slope_dataset = slope_aspect_group[DatasetName.SLOPE.value]
    aspect_dataset = slope_aspect_group[DatasetName.ASPECT.value]

    geobox = GriddedGeoBox.from_dataset(satellite_view_dataset)
    shape = geobox.get_shape_yx()
    rows, cols = shape
    crs = geobox.crs.ExportToWkt()

    # Initialise the output files
    if out_group is None:
        fid = h5py.File(
            'exiting-angles.h5', 'w', driver='core', backing_store=False
        )
    else:
        fid = out_group

    if GroupName.EXITING_GROUP.value not in fid:
        fid.create_group(GroupName.EXITING_GROUP.value)

    if filter_opts is None:
        filter_opts = {}

    grp = fid[GroupName.EXITING_GROUP.value]
    tile_size = satellite_view_dataset.chunks
    filter_opts['chunks'] = tile_size
    kwargs = compression.config(**filter_opts).dataset_compression_kwargs()
    no_data = -999
    kwargs['shape'] = shape
    kwargs['fillvalue'] = no_data
    kwargs['dtype'] = 'float32'

    # output datasets
    dataset_name = DatasetName.EXITING.value
    exiting_dset = grp.create_dataset(dataset_name, **kwargs)
    dataset_name = DatasetName.AZIMUTHAL_EXITING.value
    azi_exit_dset = grp.create_dataset(dataset_name, **kwargs)

    # attach some attributes to the image datasets
    attrs = {
        'crs_wkt': crs,
        'geotransform': geobox.transform.to_gdal(),
        'no_data_value': no_data,
    }
    desc = "Contains the exiting angles in degrees."
    attrs['description'] = desc
    attrs['alias'] = 'exiting'
    attach_image_attributes(exiting_dset, attrs)

    desc = "Contains the azimuthal exiting angles in degrees."
    attrs['description'] = desc
    attrs['alias'] = 'azimuthal-exiting'
    attach_image_attributes(azi_exit_dset, attrs)

    # process by tile
    for tile in generate_tiles(cols, rows, tile_size[1], tile_size[0]):
        # Row and column start and end locations
        ystart = tile[0][0]
        xstart = tile[1][0]
        yend = tile[0][1]
        xend = tile[1][1]
        idx = (slice(ystart, yend), slice(xstart, xend))

        # Tile size
        ysize = yend - ystart
        xsize = xend - xstart

        # Read the data for the current tile
        # Convert to required datatype and transpose
        sat_view = as_array(
            satellite_view_dataset[idx], dtype=numpy.float32, transpose=True
        )
        sat_azi = as_array(
            satellite_azimuth_dataset[idx], dtype=numpy.float32, transpose=True
        )
        slope = as_array(
            slope_dataset[idx], dtype=numpy.float32, transpose=True
        )
        aspect = as_array(
            aspect_dataset[idx], dtype=numpy.float32, transpose=True
        )

        # Initialise the work arrays
        exiting = numpy.zeros((ysize, xsize), dtype='float32')
        azi_exiting = numpy.zeros((ysize, xsize), dtype='float32')

        # Process the current tile
        exiting_angle(
            xsize,
            ysize,
            sat_view,
            sat_azi,
            slope,
            aspect,
            exiting.transpose(),
            azi_exiting.transpose(),
        )

        # Write the current tile to disk
        exiting_dset[idx] = exiting
        azi_exit_dset[idx] = azi_exiting

    if out_group is None:
        return fid
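
# ---------------------------------------------------------------------------
# Illustrative usage sketch for exiting_angles (comment only, not executed
# on import). As with incident_angles, "SATELLITE-SOLAR" is a hypothetical
# group pathname and `slp_asp_grp` comes from slope_aspect_arrays. The
# resulting exiting angle group is one of the inputs consumed by
# calculate_reflectance above.
#
#     with h5py.File("satellite-solar.h5", "r") as sat_sol:
#         exit_fid = exiting_angles(sat_sol["SATELLITE-SOLAR"], slp_asp_grp)
#     exiting = exit_fid[GroupName.EXITING_GROUP.value][
#         DatasetName.EXITING.value
#     ][:]
# ---------------------------------------------------------------------------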