def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        year,
        platform,
        product,
        res,
        aoi_crs,
        **kwargs,
    ):

        dask_chunks = dict(time=10, x=600, y=600)

        query = create_base_query(aoi, res, output_projection, aoi_crs,
                                  dask_chunks)

        start_time = datetime.strptime(f"{year}-01-01", "%Y-%m-%d")
        end_time = datetime.strptime(f"{year}-12-31", "%Y-%m-%d")

        product_name = f"{map_satellite(platform)}_{product}_annual"

        data = dc.load(time=(start_time, end_time),
                       product=product_name,
                       **query)
        if is_dataset_empty(data):
            raise Exception("DataCube Load returned an empty Dataset. "
                            "Please check load parameters for Dataset!")
        data = data.rename({"x": "longitude", "y": "latitude"})
        # There should only ever be one time entry, so the mean simply
        # collapses the singleton time dimension.
        data = data.mean(dim="time")
        file_name = path.join(path_prefix, f"archive_{year}.tiff")

        import_export.export_xarray_to_geotiff(
            data,
            file_name,
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )

        return [file_name]
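
# A minimal, self-contained sketch of the pattern used above (the synthetic
# dataset is an assumption; only the xarray calls mirror the method):
# collapse a singleton "time" dimension with mean() and rename the spatial dims.
import numpy as np
import xarray as xr

ds = xr.Dataset(
    {"band": (("time", "y", "x"), np.random.rand(1, 3, 3))},
    coords={"time": [np.datetime64("2020-07-01")],
            "y": np.arange(3), "x": np.arange(3)},
)
flat = ds.mean(dim="time")  # drops the singleton time dimension
flat = flat.rename({"x": "longitude", "y": "latitude"})
print(flat.dims)  # only latitude/longitude remain
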
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        baseline_start_date,
        baseline_end_date,
        analysis_start_date,
        analysis_end_date,
        platform_base,
        platform_analysis,
        res,
        aoi_crs,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=10, x=500, y=500)

        query = create_base_query(aoi, res, output_projection, aoi_crs, dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        (
            baseline_product,
            baseline_measurement,
            baseline_water_product,
        ) = create_product_measurement(platform_base, all_measurements)
        (
            analysis_product,
            analysis_measurement,
            analysis_water_product,
        ) = create_product_measurement(platform_analysis, all_measurements)

        baseline_time_period = (baseline_start_date, baseline_end_date)
        analysis_time_period = (analysis_start_date, analysis_end_date)

        ## Create dask graph

        baseline_ds = dc.load(
            time=baseline_time_period,
            platform=platform_base,
            product=baseline_product,
            measurements=baseline_measurement,
            **query,
        )

        analysis_ds = dc.load(
            time=analysis_time_period,
            platform=platform_analysis,
            product=analysis_product,
            measurements=analysis_measurement,
            **query,
        )

        if is_dataset_empty(baseline_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Baseline Dataset!"
            )

        if is_dataset_empty(analysis_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Analysis Dataset!"
            )

        water_scenes_baseline = dc.load(
            product=baseline_water_product,
            measurements=["water_classification"],
            time=baseline_time_period,
            **query,
        )
        water_scenes_baseline = water_scenes_baseline.where(water_scenes_baseline >= 0)
        water_scenes_analysis = dc.load(
            product=analysis_water_product,
            measurements=["water_classification"],
            time=analysis_time_period,
            **query,
        )
        water_scenes_analysis = water_scenes_analysis.where(water_scenes_analysis >= 0)

        baseline_composite = geomedian(baseline_ds, baseline_product, all_measurements)
        analysis_composite = geomedian(analysis_ds, analysis_product, all_measurements)

        # The >= 0 no-data mask was already applied above, so average the
        # water classification directly.
        water_composite_base = water_scenes_baseline.water_classification.mean(
            dim="time"
        )
        water_composite_analysis = water_scenes_analysis.water_classification.mean(
            dim="time"
        )

        baseline_composite = baseline_composite.rename(
            {"y": "latitude", "x": "longitude"}
        )
        water_composite_base = water_composite_base.rename(
            {"y": "latitude", "x": "longitude"}
        )
        analysis_composite = analysis_composite.rename(
            {"y": "latitude", "x": "longitude"}
        )
        water_composite_analysis = water_composite_analysis.rename(
            {"y": "latitude", "x": "longitude"}
        )

        # Spectral Parameter Anomaly

        parameter_baseline_composite = xr.map_blocks(
            frac_coverage_classify, baseline_composite, kwargs={"no_data": np.nan}
        )
        parameter_analysis_composite = xr.map_blocks(
            frac_coverage_classify, analysis_composite, kwargs={"no_data": np.nan}
        )

        frac_cov_baseline = parameter_baseline_composite.where(
            (water_composite_base <= 0.4) & (parameter_baseline_composite != -9999)
        )

        frac_cov_analysis = parameter_analysis_composite.where(
            (water_composite_analysis <= 0.4) & (parameter_analysis_composite != -9999)
        )
        parameter_anomaly = frac_cov_analysis - frac_cov_baseline

        ## Compute

        parameter_anomaly_output = parameter_anomaly.compute()

        ## Export products

        bs_output = parameter_anomaly_output.bs
        pv_output = parameter_anomaly_output.pv
        npv_output = parameter_anomaly_output.npv

        ## Write files

        result = []

        file_name = path.join(path_prefix, "land_change.tiff")
        import_export.export_xarray_to_geotiff(
            parameter_anomaly_output,
            file_name,
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )
        result.append(file_name)

        file_name = path.join(path_prefix, "bs_change.tiff")
        import_export.export_xarray_to_geotiff(
            bs_output,
            file_name,
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )
        result.append(file_name)

        file_name = path.join(path_prefix, "pv_change.tiff")
        import_export.export_xarray_to_geotiff(
            pv_output,
            file_name,
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )
        result.append(file_name)

        file_name = path.join(path_prefix, "npv_change.tiff")
        import_export.export_xarray_to_geotiff(
            npv_output,
            file_name,
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )
        result.append(file_name)

        return result
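
# A toy sketch of the anomaly step above (all arrays are synthetic stand-ins
# for the fractional-cover composites): mask each composite where the mean
# water fraction exceeds 0.4, then difference analysis against baseline.
import numpy as np
import xarray as xr

dims = ("latitude", "longitude")
baseline = xr.DataArray([[10.0, 20.0], [30.0, 40.0]], dims=dims)
analysis = xr.DataArray([[12.0, 18.0], [35.0, 39.0]], dims=dims)
water_frac = xr.DataArray([[0.0, 0.9], [0.1, 0.2]], dims=dims)

anomaly = (analysis.where(water_frac <= 0.4)
           - baseline.where(water_frac <= 0.4))
print(anomaly.values)  # pixel (0, 1) is NaN: masked as water
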
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        start_date,
        end_date,
        platform,
        res,
        aoi_crs,
        mosaic_type,
        indices,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=1, x=2000, y=2000)

        query = create_base_query(aoi, res, output_projection, aoi_crs,
                                  dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        product, measurement, water_product = create_product_measurement(
            platform, all_measurements)

        time = (start_date, end_date)

        ## Create dask graph

        ds = dc.load(
            time=time,
            platform=platform,
            product=product,
            measurements=measurement,
            **query,
        )

        if is_dataset_empty(ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Dataset!")

        clean_mask = mask_good_quality(ds, product)

        # Perform mosaic

        mosaic_function = {
            "median": create_median_mosaic,
            "max": create_max_ndvi_mosaic,
            "mean": create_mean_mosaic,
            "min": create_min_ndvi_mosaic,
        }
        mosaic_compositor = mosaic_function[mosaic_type]
        mosaiced_composite = dask.delayed(mosaic_compositor)(
            ds, clean_mask=clean_mask)

        # Calculate Indices

        indices_function = {
            "NDVI": NDVI,
            "NDWI": NDWI,
            "EVI": EVI,
            "NDDI": NDDI
        }
        indices_compositor = indices_function[indices]
        indices_composite = indices_compositor(mosaiced_composite)

        ## Compute

        indices_composite = indices_composite.compute()

        ## Write files

        file_name = path.join(path_prefix, "indices_composite.tiff")
        indices_ds = indices_composite.to_dataset(name=indices)
        import_export.export_xarray_to_geotiff(
            indices_ds,
            file_name,
            bands=[indices],
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )

        return [file_name]
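
# The NDVI/NDWI/EVI/NDDI helpers are imported from elsewhere; a minimal
# NDVI consistent with how they are called here (a dataset with "red" and
# "nir" bands in, a DataArray out) might look like this sketch.
import numpy as np
import xarray as xr

def ndvi(ds):
    """Normalized Difference Vegetation Index: (NIR - red) / (NIR + red)."""
    return (ds.nir - ds.red) / (ds.nir + ds.red)

demo = xr.Dataset({
    "red": ("x", np.array([0.1, 0.2])),
    "nir": ("x", np.array([0.5, 0.4])),
})
print(ndvi(demo).values)  # [0.666... 0.333...]
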
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        start_date,
        end_date,
        platform,
        res,
        aoi_crs,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=10, x=1000, y=1000)

        query = create_base_query(aoi, res, output_projection, aoi_crs,
                                  dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        product, measurement, water_product = create_product_measurement(
            platform, all_measurements)

        time = (start_date, end_date)

        ## Create dask graph

        ds = dc.load(
            time=time,
            platform=platform,
            product=product,
            measurements=measurement,
            **query,
        )

        if is_dataset_empty(ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Dataset!")

        water_scenes = dc.load(
            product=water_product,
            measurements=["water_classification"],
            time=time,
            **query,
        )

        # Set land to no_data
        water_dataset = water_scenes.where(water_scenes > 0)

        good_quality = mask_good_quality(ds, product)
        ds_clear = ds.where(good_quality)
        ds_clear_land = ds_clear.where(water_dataset.water_classification > 0)
        tsm_dataset = xr.map_blocks(tsm, ds_clear_land)

        mean_tsm = tsm_dataset.mean(dim=["time"])
        max_tsm = tsm_dataset.max(dim=["time"])
        min_tsm = tsm_dataset.min(dim=["time"])

        ## Compute

        mean_tsm, max_tsm, min_tsm = dask.compute(mean_tsm, max_tsm, min_tsm)

        ## Write files

        result = []

        file_name = path.join(path_prefix, "mean_tsm.tiff")
        import_export.export_xarray_to_geotiff(
            mean_tsm,
            file_name,
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )
        result.append(file_name)

        file_name = path.join(path_prefix, "min_tsm.tiff")
        import_export.export_xarray_to_geotiff(
            min_tsm,
            file_name,
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )
        result.append(file_name)

        file_name = path.join(path_prefix, "max_tsm.tiff")
        import_export.export_xarray_to_geotiff(
            max_tsm,
            file_name,
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )
        result.append(file_name)

        return result
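
# dask.compute(mean, max, min) above evaluates all three aggregates in a
# single pass over the shared task graph rather than three separate passes.
# A synthetic stand-in for the TSM stack:
import dask
import dask.array as da
import xarray as xr

stack = xr.DataArray(
    da.random.random((4, 3, 3), chunks=(2, 3, 3)),
    dims=("time", "y", "x"),
)
mean_, max_, min_ = dask.compute(
    stack.mean(dim="time"), stack.max(dim="time"), stack.min(dim="time")
)
print(type(mean_))  # concrete (NumPy-backed) DataArray after compute
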
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        start_date,
        end_date,
        platform,
        res,
        aoi_crs,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=10, x=600, y=600)

        query = create_base_query(aoi, res, output_projection, aoi_crs, dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        product, measurement, water_product = create_product_measurement(
            platform, all_measurements
        )

        time = (start_date, end_date)

        ## Create dask graph

        ds = dc.load(
            time=time,
            platform=platform,
            product=product,
            measurements=measurement,
            **query,
        )

        if is_dataset_empty(ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Dataset!"
            )

        water_scenes = dc.load(
            product=water_product,
            measurements=["water_classification"],
            time=time,
            **query,
        )
        water_scenes = water_scenes.where(water_scenes >= 0)

        water_composite_mean = water_scenes.water_classification.mean(dim="time")
        water_composite_mean = water_composite_mean.rename(
            {"x": "longitude", "y": "latitude"}
        )

        land_composite = geomedian(ds, product, all_measurements)
        land_composite = land_composite.rename({"x": "longitude", "y": "latitude"})

        # Fractional Cover Classification

        frac_classes = xr.map_blocks(
            frac_coverage_classify, land_composite, kwargs={"no_data": np.nan}
        )

        # Mask to remove clouds, cloud shadow, and water. Note that
        # `frac_classes != np.nan` is always True (NaN compares unequal to
        # everything), so notnull() is used for the no-data test instead.
        frac_cov_masked = frac_classes.where(
            frac_classes.notnull() & (water_composite_mean <= 0.4)
        )

        ## Compute

        fractional_cover_output = frac_cov_masked.compute()

        ## Write file

        file_name = path.join(path_prefix, "fractional_cover.tiff")
        import_export.export_xarray_to_geotiff(
            fractional_cover_output,
            file_name,
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )

        return [file_name]
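
# Why the notnull() fix above matters: `arr != np.nan` is always True in
# NumPy/xarray because NaN compares unequal to everything, including itself.
# A two-pixel demonstration:
import numpy as np
import xarray as xr

arr = xr.DataArray([1.0, np.nan])
print((arr != np.nan).values)  # [ True  True] -- NaN slips through
print(arr.notnull().values)    # [ True False] -- NaN correctly flagged
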
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        baseline_start_date,
        baseline_end_date,
        analysis_start_date,
        analysis_end_date,
        platform_base,
        platform_analysis,
        res,
        aoi_crs,
        mosaic_type,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=40, x=2000, y=2000)

        query = create_base_query(aoi, res, output_projection, aoi_crs,
                                  dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        (
            baseline_product,
            baseline_measurement,
            baseline_water_product,
        ) = create_product_measurement(platform_base, all_measurements)
        (
            analysis_product,
            analysis_measurement,
            analysis_water_product,
        ) = create_product_measurement(platform_analysis, all_measurements)

        baseline_time_period = (baseline_start_date, baseline_end_date)
        analysis_time_period = (analysis_start_date, analysis_end_date)

        ## Create dask graph

        baseline_ds = dc.load(
            time=baseline_time_period,
            platform=platform_base,
            product=baseline_product,
            measurements=baseline_measurement,
            **query,
        )

        analysis_ds = dc.load(
            time=analysis_time_period,
            platform=platform_analysis,
            product=analysis_product,
            measurements=analysis_measurement,
            **query,
        )

        if is_dataset_empty(baseline_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Baseline Dataset!")

        if is_dataset_empty(analysis_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Analysis Dataset!")

        water_scenes_baseline = dc.load(
            product=baseline_water_product,
            measurements=["water_classification"],
            time=baseline_time_period,
            **query,
        )
        water_scenes_analysis = dc.load(
            product=analysis_water_product,
            measurements=["water_classification"],
            time=analysis_time_period,
            **query,
        )

        b_good_quality = mask_good_quality(baseline_ds, baseline_product)
        a_good_quality = mask_good_quality(analysis_ds, analysis_product)

        baseline_ds = baseline_ds.where(b_good_quality)
        analysis_ds = analysis_ds.where(a_good_quality)

        mosaic_function = {
            "median": create_median_mosaic,
            "mean": create_mean_mosaic,
            "max_ndvi": create_max_ndvi_mosaic,
        }

        new_compositor = mosaic_function[mosaic_type]

        if mosaic_type == "median" or mosaic_type == "mean":
            # the mean and medan functions work automatically with dask without using `dask.delayed`
            # because they exclusively use xarray functions which already support dask.
            # this gives us a ~20% time saving on small datasets
            baseline_composite = new_compositor(baseline_ds,
                                                clean_mask=b_good_quality)
            analysis_composite = new_compositor(analysis_ds,
                                                clean_mask=a_good_quality)
        else:
            baseline_composite = dask.delayed(new_compositor)(
                baseline_ds, clean_mask=b_good_quality)
            analysis_composite = dask.delayed(new_compositor)(
                analysis_ds, clean_mask=a_good_quality)

        water_classes_base = water_scenes_baseline.where(
            water_scenes_baseline >= 0)
        water_classes_analysis = water_scenes_analysis.where(
            water_scenes_analysis >= 0)

        water_composite_base = water_classes_base.water_classification.mean(
            dim="time")
        water_composite_analysis = water_classes_analysis.water_classification.mean(
            dim="time")

        # `x != np.nan` is always True, so use notnull() for the no-data test.
        baseline_composite = baseline_composite.where(
            baseline_composite.notnull() & (water_composite_base == 0))
        analysis_composite = analysis_composite.where(
            analysis_composite.notnull() & (water_composite_analysis == 0))

        ndvi_baseline_composite = NDVI(baseline_composite)
        ndvi_analysis_composite = NDVI(analysis_composite)

        ndvi_anomaly = ndvi_analysis_composite - ndvi_baseline_composite

        ## Compute

        ndvi_anomaly = ndvi_anomaly.compute()

        ## Write file

        file_name = path.join(path_prefix, "ndvi_anomaly.tiff")
        ndvi_anomaly_export = ndvi_anomaly.to_dataset(name="ndvi_anomaly")
        import_export.export_xarray_to_geotiff(
            ndvi_anomaly_export,
            file_name,
            bands=["ndvi_anomaly"],
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )

        return [file_name]
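
# A stripped-down sketch of the dispatch pattern above (stand-in compositors,
# synthetic data): pick a compositor by name and wrap it in dask.delayed only
# when it is not natively dask-aware.
import dask
import numpy as np
import xarray as xr

ds = xr.Dataset({"red": (("time", "y", "x"), np.random.rand(3, 2, 2))})
mask = xr.DataArray(np.ones((3, 2, 2), dtype=bool), dims=("time", "y", "x"))

def mean_mosaic(dataset, clean_mask=None):
    # xarray-native reduction: dask-aware on its own
    return dataset.where(clean_mask).mean(dim="time")

def first_scene_mosaic(dataset, clean_mask=None):
    # stand-in for a NumPy-style compositor that needs dask.delayed
    return dataset.where(clean_mask).isel(time=0)

mosaic_function = {"mean": mean_mosaic, "first": first_scene_mosaic}
mosaic_type = "first"
compositor = mosaic_function[mosaic_type]
if mosaic_type == "mean":
    composite = compositor(ds, clean_mask=mask)
else:
    composite = dask.delayed(compositor)(ds, clean_mask=mask).compute()
print(composite.red.shape)  # (2, 2)
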
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        baseline_start_date,
        baseline_end_date,
        analysis_start_date,
        analysis_end_date,
        platform_base,
        platform_analysis,
        res,
        aoi_crs,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=1, x=2000, y=2000)

        query = create_base_query(aoi, res, output_projection, aoi_crs,
                                  dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        (
            _baseline_product,
            _baseline_measurement,
            baseline_water_product,
        ) = create_product_measurement(platform_base, all_measurements)
        (
            _analysis_product,
            _analysis_measurement,
            analysis_water_product,
        ) = create_product_measurement(platform_analysis, all_measurements)

        baseline_time_period = (baseline_start_date, baseline_end_date)
        analysis_time_period = (analysis_start_date, analysis_end_date)

        ## Create dask graph

        baseline_ds = dc.load(
            time=baseline_time_period,
            platform=platform_base,
            product=baseline_water_product,
            measurements=["water_classification"],
            **query,
        )

        analysis_ds = dc.load(
            time=analysis_time_period,
            platform=platform_analysis,
            product=analysis_water_product,
            measurements=["water_classification"],
            **query,
        )

        if is_dataset_empty(baseline_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Baseline Dataset!")

        if is_dataset_empty(analysis_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Analysis Dataset!")

        wc_baseline = baseline_ds.where(baseline_ds >= 0)
        wc_analysis = analysis_ds.where(analysis_ds >= 0)

        wc_baseline_mean = wc_baseline.water_classification.mean(dim="time")
        wc_analysis_mean = wc_analysis.water_classification.mean(dim="time")

        waterpres_prob = 0.3
        T0_nd_water = np.isnan(wc_baseline_mean)
        wc_baseline_rc_int = wc_baseline_mean.where(
            (wc_baseline_mean < waterpres_prob) | T0_nd_water,
            1)  # recode >= prob to water (1)
        wc_baseline_rc = wc_baseline_rc_int.where(
            (wc_baseline_rc_int >= waterpres_prob) | T0_nd_water,
            0)  # recode < prob to no water (0)

        T1_nd_water = np.isnan(wc_analysis_mean)
        wc_analysis_rc_int = wc_analysis_mean.where(
            (wc_analysis_mean < waterpres_prob) | T1_nd_water,
            1)  # recode >= prob to water (1)
        wc_analysis_rc = wc_analysis_rc_int.where(
            (wc_analysis_rc_int >= waterpres_prob) | T1_nd_water,
            0)  # recode < prob to no water (0)

        # Outputs

        difference = wc_analysis_rc - wc_baseline_rc
        difference_range = wc_analysis_mean - wc_baseline_mean

        ## Compute

        difference_output, difference_range_output = dask.compute(
            difference, difference_range)

        ## Write files

        result = []

        file_name = path.join(path_prefix, "difference_range.tiff")
        import_export.export_xarray_to_geotiff(
            difference_range_output,
            file_name,
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )
        result.append(file_name)

        file_name = path.join(path_prefix, "difference.tiff")
        import_export.export_xarray_to_geotiff(
            difference_output,
            file_name,
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )
        result.append(file_name)

        return result
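
# The two-step where() chain above binarizes a mean water fraction at the
# 0.3 threshold while preserving NaN no-data pixels. The same reclassification
# on a toy array (the values are assumptions):
import numpy as np
import xarray as xr

mean_water = xr.DataArray([0.0, 0.2, 0.7, np.nan])
nd = np.isnan(mean_water)
binary = mean_water.where((mean_water < 0.3) | nd, 1)  # >= 0.3 -> water (1)
binary = binary.where((binary >= 0.3) | nd, 0)         # <  0.3 -> land (0)
print(binary.values)  # [ 0.  0.  1. nan]
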
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        baseline_start_date,
        baseline_end_date,
        analysis_start_date,
        analysis_end_date,
        platform_base,
        platform_analysis,
        res,
        aoi_crs,
        mosaic_type,
        indices,
        minC,
        maxC,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=1, x=1000, y=1000)

        query = create_base_query(aoi, res, output_projection, aoi_crs,
                                  dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        (
            baseline_product,
            baseline_measurement,
            baseline_water_product,
        ) = create_product_measurement(platform_base, all_measurements)
        (
            analysis_product,
            analysis_measurement,
            analysis_water_product,
        ) = create_product_measurement(platform_analysis, all_measurements)

        baseline_time_period = (baseline_start_date, baseline_end_date)
        analysis_time_period = (analysis_start_date, analysis_end_date)

        ## Create dask graph

        baseline_ds = dc.load(
            time=baseline_time_period,
            platform=platform_base,
            product=baseline_product,
            measurements=baseline_measurement,
            **query,
        )

        analysis_ds = dc.load(
            time=analysis_time_period,
            platform=platform_analysis,
            product=analysis_product,
            measurements=analysis_measurement,
            **query,
        )

        if is_dataset_empty(baseline_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Baseline Dataset!")

        if is_dataset_empty(analysis_ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Analysis Dataset!")

        baseline_clean_mask = mask_good_quality(baseline_ds, baseline_product)
        analysis_clean_mask = mask_good_quality(analysis_ds, analysis_product)

        xx_data_b = baseline_ds[all_measurements]
        xx_data_a = analysis_ds[all_measurements]

        baseline_ds_masked = odc.algo.keep_good_only(xx_data_b,
                                                     where=baseline_clean_mask)
        analysis_ds_masked = odc.algo.keep_good_only(xx_data_a,
                                                     where=analysis_clean_mask)

        if mosaic_type == "geomedian":
            baseline_composite = geomedian(baseline_ds_masked,
                                           baseline_product, all_measurements)
            analysis_composite = geomedian(analysis_ds_masked,
                                           analysis_product, all_measurements)
        else:
            mosaic_function = {
                "median": create_median_mosaic,
                "max": create_max_ndvi_mosaic,
                "mean": create_mean_mosaic,
            }
            new_compositor = mosaic_function[mosaic_type]
            baseline_composite = dask.delayed(new_compositor)(
                baseline_ds_masked,
                clean_mask=baseline_clean_mask,
                no_data=np.nan)
            analysis_composite = dask.delayed(new_compositor)(
                analysis_ds_masked,
                clean_mask=analysis_clean_mask,
                no_data=np.nan)

        water_scenes_baseline = dc.load(
            product=baseline_water_product,
            measurements=["water_classification"],
            time=baseline_time_period,
            **query,
        )
        water_scenes_baseline = water_scenes_baseline.where(
            water_scenes_baseline >= 0)
        water_scenes_analysis = dc.load(
            product=analysis_water_product,
            measurements=["water_classification"],
            time=analysis_time_period,
            **query,
        )
        water_scenes_analysis = water_scenes_analysis.where(
            water_scenes_analysis >= 0)

        baseline_composite = baseline_composite.rename({
            "y": "latitude",
            "x": "longitude"
        })
        analysis_composite = analysis_composite.rename({
            "y": "latitude",
            "x": "longitude"
        })

        # Spectral Parameter

        parameter_baseline_composite = createparametercomposite(
            indices, baseline_composite)
        parameter_analysis_composite = createparametercomposite(
            indices, analysis_composite)

        # Generate water mask

        # These means are plain xarray reductions and already dask-backed, so
        # wrapping them in dask.delayed is unnecessary (and would break the
        # .values access below).
        water_composite_base = water_scenes_baseline.water_classification.mean(
            dim="time")
        water_composite_analysis = water_scenes_analysis.water_classification.mean(
            dim="time")

        # Apply water mask

        vegetation_baseline = parameter_baseline_composite.where(
            water_composite_base.values <= 0.4).where(
                parameter_baseline_composite != -9999)
        vegetation_analysis = parameter_analysis_composite.where(
            water_composite_analysis.values <= 0.4).where(
                parameter_analysis_composite != -9999)

        parameter_anomaly = vegetation_analysis - vegetation_baseline

        ## Compute

        parameter_anomaly_output = parameter_anomaly.compute()

        ## Anomaly Threshold Product

        no_data_mask = np.isnan(parameter_anomaly_output)
        a = parameter_anomaly_output
        b = a.where((a < maxC) | no_data_mask, 200)    # >= maxC -> 200
        c = b.where((b > minC) | no_data_mask, 300)    # <= minC -> 300
        d = c.where((c >= maxC) | (c <= minC) | no_data_mask,
                    100)  # in between -> 100
        param_thres_output = d.to_dataset(name="param_thres")

        ## Write files

        result = []

        file_name = path.join(path_prefix, "veg_change.tiff")
        import_export.export_xarray_to_geotiff(
            parameter_anomaly_output,
            file_name,
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )
        result.append(file_name)

        file_name = path.join(path_prefix, "param_thres.tiff")
        import_export.export_xarray_to_geotiff(
            param_thres_output,
            file_name,
            bands=["param_thres"],
            crs=output_projection,
            x_coord="longitude",
            y_coord="latitude",
        )
        result.append(file_name)

        return result
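
# The a/b/c/d chain above recodes the anomaly into classes: >= maxC -> 200,
# <= minC -> 300, in between -> 100, NaN preserved. A toy run with assumed
# thresholds minC=-5, maxC=5:
import numpy as np
import xarray as xr

minC, maxC = -5.0, 5.0
a = xr.DataArray([-10.0, 0.0, 10.0, np.nan])
nd = np.isnan(a)
b = a.where((a < maxC) | nd, 200)                   # >= maxC -> 200
c = b.where((b > minC) | nd, 300)                   # <= minC -> 300
d = c.where((c >= maxC) | (c <= minC) | nd, 100)    # in between -> 100
print(d.values)  # [300. 100. 200.  nan]
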
    def generate_product(
        self,
        dc,
        path_prefix,
        aoi,
        output_projection,
        start_date,
        end_date,
        platform,
        res,
        aoi_crs,
        **kwargs,
    ):

        ## Create datacube query

        dask_chunks = dict(time=1, x=2000, y=2000)

        query = create_base_query(aoi, res, output_projection, aoi_crs,
                                  dask_chunks)

        all_measurements = ["green", "red", "blue", "nir", "swir1", "swir2"]
        _product, _measurement, water_product = create_product_measurement(
            platform, all_measurements)

        time = (start_date, end_date)

        ## Create dask graph

        ds = dc.load(
            time=time,
            platform=platform,
            product=water_product,
            group_by="solar_day",
            measurements=["water"],
            **query,
        )

        if is_dataset_empty(ds):
            raise Exception(
                "DataCube Load returned an empty Dataset. "
                "Please check load parameters for Dataset!")

        water = ds.where(ds != -9999)
        water_composite_mean = water.water.mean(dim="time")

        ## Compute

        water_composite_mean_output = water_composite_mean.compute()

        ## Write files

        file_name = path.join(path_prefix, "water.tiff")
        import_export.export_xarray_to_geotiff(
            water_composite_mean_output,
            file_name,
            bands=["water"],
            crs=output_projection,
            x_coord="x",
            y_coord="y",
        )

        return [file_name]
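
# The where(ds != -9999) step above turns the sentinel value into NaN, and
# mean(dim="time") skips NaN by default, so partial coverage still yields a
# per-pixel water frequency. A toy illustration:
import numpy as np
import xarray as xr

water = xr.DataArray([[1.0], [-9999.0], [0.0]], dims=("time", "x"))
clean = water.where(water != -9999)
print(clean.mean(dim="time").values)  # [0.5] -- the sentinel scene is ignored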