Example #1
def write_measurement_h5(
    p: DatasetAssembler,
    name: str,
    g: h5py.Dataset,
    overviews=images.DEFAULT_OVERVIEWS,
    overview_resampling=Resampling.nearest,
    expand_valid_data=True,
    file_id: str = None,
):
    """
    Write a measurement by copying it from an HDF5 dataset.
    """
    # An h5py dataset exposes a ``chunks`` attribute, so read it fully into memory;
    # anything else (e.g. an in-memory numpy array) is used as-is.
    if hasattr(g, "chunks"):
        data = g[:]
    else:
        data = g

    p.write_measurement_numpy(
        name=name,
        array=data,
        grid_spec=images.GridSpec(
            shape=g.shape,
            transform=Affine.from_gdal(*g.attrs["geotransform"]),
            crs=CRS.from_wkt(g.attrs["crs_wkt"]),
        ),
        nodata=g.attrs.get("no_data_value"),
        overviews=overviews,
        overview_resampling=overview_resampling,
        expand_valid_data=expand_valid_data,
        file_id=file_id,
    )
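
The helper relies on names from its surrounding module. Below is a minimal sketch of the imports it appears to assume and a hypothetical call site, written against the eodatasets3 API; the HDF5 path, group name, measurement name, and output location are placeholders, not values from the original code, and in practice the assembler also needs its product and acquisition metadata set before measurements are written.

# Imports this snippet appears to assume (adjust to your package layout):
from pathlib import Path

import h5py
from affine import Affine
from rasterio.crs import CRS
from rasterio.enums import Resampling
from eodatasets3 import DatasetAssembler, images

# Hypothetical call site: copy one band out of a wagl-style HDF5 file whose
# datasets carry "geotransform", "crs_wkt" and "no_data_value" attributes.
with h5py.File("wagl-outputs.h5", "r") as h5_file:
    with DatasetAssembler(Path("/tmp/output-collection")) as p:
        write_measurement_h5(p, "blue", h5_file["RES-GROUP-1/NBAR/BAND-2"])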
Example #2
def write_measurement_h5(
    p: DatasetAssembler,
    full_name: str,
    g: h5py.Dataset,
    overviews=images.DEFAULT_OVERVIEWS,
    overview_resampling=Resampling.nearest,
    expand_valid_data=True,
    file_id: str = None,
):
    """
    Write a measurement by copying it from an HDF5 dataset.
    """
    if hasattr(g, "chunks"):
        data = g[:]
    else:
        data = g

    product_name, band_name = full_name.split(":")
    p.write_measurement_numpy(
        array=data,
        grid_spec=images.GridSpec(
            shape=g.shape,
            transform=Affine.from_gdal(*g.attrs["geotransform"]),
            crs=CRS.from_wkt(g.attrs["crs_wkt"]),
        ),
        nodata=g.attrs.get("no_data_value"),
        overviews=overviews,
        overview_resampling=overview_resampling,
        expand_valid_data=expand_valid_data,
        file_id=file_id,
        # Because of our strange sub-products and filename standards, we want the
        # product_name to be included in the recorded band metadata,
        # but not in its filename.
        # So we manually calculate a filename without the extra product name prefix.
        name=full_name,
        path=p.names.measurement_filename(band_name, "tif", file_id=file_id),
    )
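
This variant differs from the first only in how the output filename is derived: the measurement is recorded in metadata under its full product:band name, while the file on disk is named from the band alone. A small sketch of that split, with illustrative names not taken from the original call sites:

full_name = "nbar:blue"
product_name, band_name = full_name.split(":")
# product_name == "nbar", band_name == "blue"
# Metadata keeps the full "nbar:blue" identifier (name=full_name above), while
# p.names.measurement_filename(band_name, "tif", file_id=file_id) builds a .tif
# filename from "blue" only, dropping the product prefix.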
Example #3
def _create_contiguity(
    p: DatasetAssembler,
    product_list: Iterable[str],
    resolution_yx: Tuple[float, float],
    timedelta_product: str = "nbar",
    timedelta_data: numpy.ndarray = None,
):
    """
    Create the contiguity (all pixels valid) dataset.

    Write a contiguity mask file based on the intersection of valid data pixels across all
    bands from the input files.
    """
    for product in product_list:
        contiguity = None
        for grid, band_name, path in p.iter_measurement_paths():
            if not band_name.startswith(f"{product.lower()}:"):
                continue
            # Only our given res group (no pan band in Landsat)
            if grid.resolution_yx != resolution_yx:
                continue

            with rasterio.open(path) as ds:
                ds: DatasetReader
                if contiguity is None:
                    contiguity = numpy.ones((ds.height, ds.width),
                                            dtype="uint8")
                    geobox = GridSpec.from_rio(ds)
                elif ds.shape != contiguity.shape:
                    raise NotImplementedError(
                        "Contiguity from measurements of different shape")

                for band in ds.indexes:
                    contiguity &= ds.read(band) > 0

        if contiguity is None:
            secho(f"No images found for requested product {product}", fg="red")
            continue

        p.write_measurement_numpy(
            f"oa:{product.lower()}_contiguity",
            contiguity,
            geobox,
            nodata=255,
            overviews=None,
            expand_valid_data=False,
        )

        # Mask timedelta_data with the contiguity mask to get the maximum and minimum
        # timedelta within the NBAR product footprint (Landsat sensors). For Sentinel
        # sensors, the datetime range is inherited from the level-1 YAML file.
        if timedelta_data is not None and product.lower() == timedelta_product:
            valid_timedelta_data = numpy.ma.masked_where(
                contiguity == 0, timedelta_data)

            def offset_from_center(v: numpy.datetime64):
                return p.datetime + timedelta(microseconds=v.astype(float) *
                                              1_000_000.0)

            p.datetime_range = (
                offset_from_center(numpy.ma.min(valid_timedelta_data)),
                offset_from_center(numpy.ma.max(valid_timedelta_data)),
            )
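
The heart of the contiguity computation is a logical AND across every band's valid-data test: a pixel stays set only if it is non-zero in every band of every matching measurement. A minimal, self-contained numpy sketch of that reduction, with synthetic arrays standing in for the rasterio reads:

import numpy

# Synthetic stand-ins for three bands of the same shape (0 means nodata here).
bands = [
    numpy.array([[0, 3], [5, 7]], dtype="uint16"),
    numpy.array([[2, 0], [5, 9]], dtype="uint16"),
    numpy.array([[4, 6], [5, 0]], dtype="uint16"),
]

contiguity = numpy.ones(bands[0].shape, dtype="uint8")
for band in bands:
    contiguity &= band > 0  # a pixel survives only if it is valid in every band

print(contiguity)
# -> [[0 0]
#     [1 0]]   only the pixel that is non-zero in all three bands remains

The same mask is then reused to clip the per-pixel acquisition-time offsets, so the recorded datetime_range covers only pixels that actually appear in the product footprint.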