Example 1
def landsat_qa_clean_mask(dataset, platform, cover_types=['clear', 'water']):
    """
    Returns a clean_mask for `dataset` that masks out various types of terrain cover using the
    Landsat pixel_qa band. Note that Landsat masks specify what to keep, not what to remove.
    This means that using `cover_types=['clear', 'water']` should keep only clear land and water.

    See "pixel_qa band" here: https://landsat.usgs.gov/landsat-surface-reflectance-quality-assessment
    and Section 7 here: https://landsat.usgs.gov/sites/default/files/documents/lasrc_product_guide.pdf.

    Parameters
    ----------
    dataset: xarray.Dataset
        An xarray (usually produced by `datacube.load()`) that contains a `pixel_qa` data
        variable.
    platform: str
        A string denoting the platform to be used. Can be "LANDSAT_5", "LANDSAT_7", or
        "LANDSAT_8".
    cover_types: list
        A list of the cover types to include. Adding a cover type allows it to remain in the masked data.
        Cover types for all Landsat platforms include:
        ['fill', 'clear', 'water', 'shadow', 'snow', 'cloud', 'low_conf_cl', 'med_conf_cl', 'high_conf_cl'].

        'fill' removes "no_data" values, which indicate an absence of data. This value is -9999 for Landsat platforms.
        Generally, don't use 'fill'.
        'clear' allows only clear terrain. 'water' allows only water. 'shadow' allows only cloud shadows.
        'snow' allows only snow. 'cloud' allows only clouds, but note that it often only selects cloud boundaries.
        'low_conf_cl', 'med_conf_cl', and 'high_conf_cl' denote low, medium, and high confidence in cloud coverage.
        'low_conf_cl' is useful on its own for only removing clouds; however, 'clear' is usually better suited for this.
        'med_conf_cl' is useful in combination with 'low_conf_cl' to allow slightly heavier cloud coverage.
        Note that 'med_conf_cl' and 'cloud' are very similar.
        'high_conf_cl' is useful in combination with both 'low_conf_cl' and 'med_conf_cl'.

        For Landsat 8, there are more cover types: ['low_conf_cir', 'high_conf_cir', 'terrain_occ'].
        'low_conf_cir' and 'high_conf_cir' denote low and high confidence in cirrus clouds.
        'terrain_occ' allows only occluded terrain.

    Returns
    -------
    clean_mask: xarray.DataArray
        An xarray DataArray with the same number and order of coordinates as in `dataset`.
    """
    processing_options = {
        "LANDSAT_5": ls5_unpack_qa,
        "LANDSAT_7": ls7_unpack_qa,
        "LANDSAT_8": ls8_unpack_qa
    }

    clean_mask = None
    # Keep all specified cover types (e.g. 'clear', 'water'), so logically OR the separate masks.
    for i, cover_type in enumerate(cover_types):
        cover_type_clean_mask = processing_options[platform](dataset.pixel_qa,
                                                             cover_type)
        clean_mask = cover_type_clean_mask if i == 0 else xr_or(
            clean_mask, cover_type_clean_mask)
    return clean_mask
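
A minimal usage sketch (not part of the example above): the product name, extents, and measurement list below are placeholder assumptions; only the call to landsat_qa_clean_mask and the application of the mask with Dataset.where follow the function's documented interface.

import datacube

dc = datacube.Datacube(app="qa_mask_sketch")
# Placeholder product, extents, and measurements -- substitute your own.
dataset = dc.load(product="ls8_usgs_sr_scene",
                  measurements=["red", "green", "blue", "pixel_qa"],
                  latitude=(-1.0, -0.9), longitude=(36.0, 36.1),
                  time=("2018-01-01", "2018-03-01"))

# Keep only clear land and water pixels; everything else becomes NaN.
clean_mask = landsat_qa_clean_mask(dataset, platform="LANDSAT_8",
                                   cover_types=['clear', 'water'])
masked_dataset = dataset.where(clean_mask)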
Example 2
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    geographic_chunk=None,
                    num_scn_per_chk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property, which signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id: identifiers for the main task and the chunk being processed
        geographic_chunk: range of latitude and longitude to load - a dict with keys 'latitude' and 'longitude'
        num_scn_per_chk: A dictionary of the number of scenes per chunk for the baseline
                         and analysis extents. Used to determine task progress.
        parameters: all required kwargs to load data.

    Returns:
        paths to the output products, a metadata dict, and a dict containing the geo chunk id
    """
    chunk_id = str(geo_chunk_id)
    task = SpectralAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    # For both the baseline and analysis time ranges for this
    # geographic chunk, load, calculate the spectral index, composite,
    # and filter the data according to user-supplied parameters -
    # recording where the data was out of the filter's range so we can
    # create the output product (an image).
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    spectral_index = task.query_type.result_id
    composites = {}
    composites_out_of_range = {}
    no_data_value = task.satellite.no_data_value
    for composite_name in ['baseline', 'analysis']:
        if check_cancel_task(self, task): return

        # Use the corresponding time range for the baseline and analysis data.
        updated_params['time'] = \
            updated_params['baseline_time' if composite_name == 'baseline' else 'analysis_time']
        time_column_data = dc.get_dataset_by_extent(**updated_params)
        # If this geographic chunk is outside the data extents, return None.
        if len(time_column_data.dims) == 0: return None

        # Obtain the clean mask for the satellite.
        time_column_clean_mask = task.satellite.get_clean_mask_func()(
            time_column_data)
        measurements_list = task.satellite.measurements.replace(" ",
                                                                "").split(",")
        # Obtain the mask for valid Landsat values.
        time_column_invalid_mask = landsat_clean_mask_invalid(
            time_column_data, platform=task.satellite.platform,
            collection=task.satellite.collection, level=task.satellite.level).values
        # Also exclude data points with the no_data value.
        no_data_mask = time_column_data[
            measurements_list[0]].values != no_data_value
        # Combine the clean masks.
        time_column_clean_mask = time_column_clean_mask | time_column_invalid_mask | no_data_mask

        # Obtain the composite.
        composite = task.get_processing_method()(
            time_column_data,
            clean_mask=time_column_clean_mask,
            no_data=task.satellite.no_data_value)
        # Obtain the mask for valid Landsat values.
        composite_invalid_mask = landsat_clean_mask_invalid(
            composite, platform=task.satellite.platform,
            collection=task.satellite.collection, level=task.satellite.level).values
        # Also exclude data points with the no_data value via the compositing mask.
        composite_no_data_mask = composite[
            measurements_list[0]].values != no_data_value
        composite_clean_mask = composite_invalid_mask | composite_no_data_mask

        # Compute the spectral index for the composite.
        spec_ind_params = dict()
        if spectral_index == 'fractional_cover':
            spec_ind_params = dict(clean_mask=composite_clean_mask,
                                   no_data=no_data_value)
        spec_ind_result = spectral_indices_function_map[spectral_index](
            composite, **spec_ind_params)
        if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
            composite[spectral_index] = spec_ind_result
        else:  # Fractional Cover
            composite = xr.merge([composite, spec_ind_result])
            # Fractional Cover is supposed to have a range of [0, 100], with its bands -
            # 'bs', 'pv', and 'npv' - summing to 100. However, the function we use
            # can have the sum of those bands as high as 106.
            # frac_cov_min, frac_cov_max = spectral_indices_range_map[spectral_index]
            frac_cov_min, frac_cov_max = 0, 106
            for band in ['bs', 'pv', 'npv']:
                composite[band].values = \
                    np.interp(composite[band].values, (frac_cov_min, frac_cov_max),
                              spectral_indices_range_map[spectral_index])

        composites[composite_name] = composite

        # Determine where the composite is out of range.
        # We rename the resulting xarray.DataArray because calling to_netcdf()
        # on it at the end of this function will save it as a Dataset
        # with one data variable with the same name as the DataArray.
        if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
            composites_out_of_range[composite_name] = \
                xr_or(composite[spectral_index] < task.composite_threshold_min,
                      task.composite_threshold_max < composite[spectral_index]).rename(spectral_index)
        else:  # Fractional Cover
            # For fractional cover, a composite pixel is out of range if any of its
            # fractional cover bands are out of range.
            composites_out_of_range[composite_name] = xr_or(
                xr_or(
                    xr_or(composite['bs'] < task.composite_threshold_min,
                          task.composite_threshold_max < composite['bs']),
                    xr_or(composite['pv'] < task.composite_threshold_min,
                          task.composite_threshold_max < composite['pv'])),
                xr_or(composite['npv'] < task.composite_threshold_min,
                      task.composite_threshold_max <
                      composite['npv'])).rename(spectral_index)

        # Update the metadata with the current data (baseline or analysis).
        metadata = task.metadata_from_dataset(metadata, time_column_data,
                                              time_column_clean_mask,
                                              parameters)
        # Record task progress (baseline or analysis composite data obtained).
        task.scenes_processed = F(
            'scenes_processed') + num_scn_per_chk[composite_name]
        task.save(update_fields=['scenes_processed'])
    dc.close()

    if check_cancel_task(self, task): return
    # Create a difference composite.
    diff_composite = composites['analysis'] - composites['baseline']
    # Find where either the baseline or analysis composite was out of range for a pixel.
    composite_out_of_range = xr_or(*composites_out_of_range.values())
    # Find where either the baseline or analysis composite was no_data.
    if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
        composite_no_data = xr_or(
            composites['baseline'][measurements_list[0]] == no_data_value,
            composites['analysis'][measurements_list[0]] == no_data_value)
        if spectral_index == 'evi':  # EVI returns no_data for values outside [-1,1].
            composite_no_data = xr_or(
                composite_no_data,
                xr_or(composites['baseline'][spectral_index] == no_data_value,
                      composites['analysis'][spectral_index] == no_data_value))
    else:  # Fractional Cover
        composite_no_data = xr_or(
            xr_or(
                xr_or(composites['baseline']['bs'] == no_data_value,
                      composites['baseline']['pv'] == no_data_value),
                composites['baseline']['npv'] == no_data_value),
            xr_or(
                xr_or(composites['analysis']['bs'] == no_data_value,
                      composites['analysis']['pv'] == no_data_value),
                composites['analysis']['npv'] == no_data_value))
    composite_no_data = composite_no_data.rename(spectral_index)

    # Drop unneeded data variables.
    diff_composite = diff_composite.drop(measurements_list)

    if check_cancel_task(self, task): return

    composite_path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(diff_composite, composite_path)
    composite_out_of_range_path = os.path.join(task.get_temp_path(),
                                               chunk_id + "_out_of_range.nc")
    logger.info("composite_out_of_range:" + str(composite_out_of_range))
    export_xarray_to_netcdf(composite_out_of_range,
                            composite_out_of_range_path)
    composite_no_data_path = os.path.join(task.get_temp_path(),
                                          chunk_id + "_no_data.nc")
    export_xarray_to_netcdf(composite_no_data, composite_no_data_path)
    return composite_path, composite_out_of_range_path, composite_no_data_path, \
           metadata, {'geo_chunk_id': geo_chunk_id}
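
For reference, the fractional-cover rescaling step above maps band values from the observed [0, 106] range onto the expected output range with np.interp. A standalone sketch of that mapping, assuming a target range of [0, 100] in place of spectral_indices_range_map['fractional_cover'] (not shown in this example):

import numpy as np

frac_cov_min, frac_cov_max = 0, 106   # observed range of the 'bs'/'pv'/'npv' values
target_min, target_max = 0, 100       # assumed target range, for illustration only

band_values = np.array([0.0, 53.0, 106.0])
rescaled = np.interp(band_values, (frac_cov_min, frac_cov_max),
                     (target_min, target_max))
print(rescaled)  # [  0.  50. 100.]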