Example #1
def recombine_time_chunks(self, chunks, task_id=None):
    """Recombine processed chunks over the time index.

    Open time-chunked processed datasets and recombine them using the same function
    that was used to process them. This assumes an iterative algorithm; if the
    algorithm is not iterative, the data is simply returned unchanged.

    Args:
        chunks: list of the return from the processing_task function - path, metadata, and {chunk ids}

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    task = CustomMosaicToolTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    # Sort by path so chunks are ordered by time id - earlier chunks first, since ids increment (e.g. 0, 1, 2, ...).
    chunks = chunks if isinstance(chunks, list) else [chunks]
    chunks = [chunk for chunk in chunks if chunk is not None]
    if len(chunks) == 0:
        return None
    total_chunks = sorted(chunks, key=lambda x: x[0])
    geo_chunk_id = total_chunks[0][2]['geo_chunk_id']
    time_chunk_id = total_chunks[0][2]['time_chunk_id']
    metadata = {}

    def generate_animation(index, combined_data):
        time_chunk_size = task.get_chunk_size()['time']
        time_chunk_size = 1 if time_chunk_size is None else time_chunk_size
        base_index = time_chunk_size * index
        for index in range(time_chunk_size):
            path = os.path.join(task.get_temp_path(),
                                "animation_{}.nc".format(base_index + index))
            if os.path.exists(path):
                animated_data = xr.open_dataset(path)
                if task.animated_product.animation_id == "cumulative":
                    animated_data = xr.concat([animated_data], 'time')
                    animated_data['time'] = [0]
                    clear_mask = task.satellite.get_clean_mask_func()(
                        animated_data)
                    animated_data = task.get_processing_method()(
                        animated_data,
                        clean_mask=clear_mask,
                        intermediate_product=combined_data,
                        no_data=task.satellite.no_data_value)
                path = os.path.join(
                    task.get_temp_path(),
                    "animation_{}.png".format(base_index + index))
                write_png_from_xr(path,
                                  animated_data,
                                  bands=[
                                      task.query_type.red,
                                      task.query_type.green,
                                      task.query_type.blue
                                  ],
                                  scale=task.satellite.get_scale(),
                                  no_data=task.satellite.no_data_value)

    combined_data = None
    for index, chunk in enumerate(total_chunks):
        metadata.update(chunk[1])
        data = xr.open_dataset(chunk[0])
        if combined_data is None:
            if task.animated_product.animation_id != "none":
                generate_animation(index, combined_data)
            combined_data = data
            continue
        # Give time an index to keep compositing from breaking.
        data = xr.concat([data], 'time')
        data['time'] = [0]
        clear_mask = task.satellite.get_clean_mask_func()(data)
        combined_data = task.get_processing_method()(
            data,
            clean_mask=clear_mask,
            intermediate_product=combined_data,
            no_data=task.satellite.no_data_value)
        if check_cancel_task(self, task): return
        # if we're animating, combine it all and save to disk.
        if task.animated_product.animation_id != "none":
            generate_animation(index, combined_data)

    path = os.path.join(task.get_temp_path(),
                        "recombined_time_{}.nc".format(geo_chunk_id))
    export_xarray_to_netcdf(combined_data, path)
    logger.info("Done combining time chunks for geo: " + str(geo_chunk_id))
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
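
The re-wrapping trick above (xr.concat([data], 'time') followed by data['time'] = [0]) is what lets a composite of earlier chunks be fed back through the same processing function. Below is a minimal, self-contained sketch of that pattern; toy_composite is a hypothetical stand-in for task.get_processing_method(), not the platform's actual compositor.

import numpy as np
import xarray as xr

def toy_composite(data, intermediate_product=None, no_data=-9999):
    # Fill no-data pixels in the running composite from each time slice in turn.
    composite = intermediate_product
    for t in range(data.sizes['time']):
        time_slice = data.isel(time=t, drop=True)
        composite = time_slice if composite is None else composite.where(composite != no_data, time_slice)
    return composite

# Two already-composited chunks, re-opened without a usable time index.
chunk_a = xr.Dataset({'red': (('y', 'x'), np.array([[-9999., 2.], [3., 4.]]))})
chunk_b = xr.Dataset({'red': (('y', 'x'), np.array([[5., 6.], [7., 8.]]))})

combined = chunk_a
# Give the next chunk a dummy time index so the same compositor accepts it again.
data = xr.concat([chunk_b], 'time')
data['time'] = [0]
combined = toy_composite(data, intermediate_product=combined)
print(combined.red.values)  # the -9999 pixel is filled from chunk_b: [[5. 2.] [3. 4.]]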
Example #2
def create_output_products(self, data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update the status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}
    """
    task = TsmTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    full_metadata = data[1]
    dataset = xr.open_dataset(data[0]).astype('float64')
    dataset['variability'] = dataset['max'] - dataset['normalized_data']
    dataset['wofs'] = dataset.wofs / dataset.wofs_total_clean
    dataset = dataset.where(~xr_nan(dataset), 0)
    dataset_masked = mask_water_quality(dataset, dataset.wofs)

    task.result_path = os.path.join(task.get_result_path(), "tsm.png")
    task.clear_observations_path = os.path.join(task.get_result_path(), "clear_observations.png")
    task.water_percentage_path = os.path.join(task.get_result_path(), "water_percentage.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(), "data_netcdf.nc")
    task.animation_path = os.path.join(task.get_result_path(),
                                       "animation.gif") if task.animated_product.animation_id != 'none' else ""
    task.final_metadata_from_dataset(dataset_masked)
    task.metadata_from_dict(full_metadata)

    bands = [task.query_type.data_variable, 'total_clean', 'wofs']
    band_paths = [task.result_path, task.clear_observations_path, task.water_percentage_path]

    export_xarray_to_netcdf(dataset_masked, task.data_netcdf_path)

    write_geotiff_from_xr(task.data_path, dataset_masked, bands=bands, no_data=task.satellite.no_data_value)

    for band, band_path in zip(bands, band_paths):
        write_single_band_png_from_xr(
            band_path,
            dataset_masked,
            band,
            color_scale=task.color_scales[band],
            fill_color='black',
            interpolate=False,
            no_data=task.satellite.no_data_value)

    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I', duration=1.0) as writer:
            valid_range = range(len(full_metadata))
            for index in valid_range:
                path = os.path.join(task.get_temp_path(), "animation_final_{}.nc".format(index))
                if os.path.exists(path):
                    png_path = os.path.join(task.get_temp_path(), "animation_{}.png".format(index))
                    if task.animated_product.animation_id != "scene":
                        animated_data = mask_water_quality(
                            xr.open_dataset(path).astype('float64'), dataset.wofs)
                    else:
                        animated_data = xr.open_dataset(path)
                    write_single_band_png_from_xr(
                        png_path,
                        animated_data,
                        task.animated_product.data_variable,
                        color_scale=task.color_scales[task.animated_product.data_variable],
                        fill_color='black',
                        interpolate=False,
                        no_data=task.satellite.no_data_value)
                    image = imageio.imread(png_path)
                    writer.append_data(image)

    dates = list(map(lambda x: datetime.strptime(x, "%m/%d/%Y"), task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(
            task.plot_path,
            dates=dates,
            datasets=task._get_field_as_list('clean_pixel_percentages_per_acquisition'),
            data_labels="Clean Pixel Percentage (%)",
            titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    task.update_bounds_from_dataset(dataset_masked)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status("OK", "All products have been generated. Your result will be loaded on the map.")
    return True
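
The GIF assembly step relies on imageio's writer interface; a stripped-down, runnable version of that loop, with synthetic frames in place of the task's rendered PNGs, looks like this.

import imageio
import numpy as np

# Three synthetic frames standing in for the per-acquisition PNGs.
frames = [np.full((32, 32, 3), shade, dtype=np.uint8) for shade in (0, 128, 255)]
with imageio.get_writer('animation.gif', mode='I', duration=1.0) as writer:
    for frame in frames:
        # The task skips frames whose source file is missing; here every frame exists.
        writer.append_data(frame)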
Example #3
def recombine_geographic_chunks(self, chunks, task_id=None):
    """Recombine processed data over the geographic indices

    For each geographic chunk process spawned by the main task, open the resulting dataset
    and combine it into a single dataset. Combine metadata as well, writing to disk.

    Args:
        chunks: list of the return from the processing_task function - path, metadata, and {chunk ids}

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    task = CustomMosaicToolTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    total_chunks = [chunks] if not isinstance(chunks, list) else chunks
    total_chunks = [chunk for chunk in total_chunks if chunk is not None]
    if len(total_chunks) == 0:
        return None
    geo_chunk_id = total_chunks[0][2]['geo_chunk_id']
    time_chunk_id = total_chunks[0][2]['time_chunk_id']

    metadata = {}
    chunk_data = []
    for index, chunk in enumerate(total_chunks):
        metadata = task.combine_metadata(metadata, chunk[1])
        chunk_data.append(xr.open_dataset(chunk[0]))
    combined_data = combine_geographic_chunks(chunk_data)

    # if we're animating, combine it all and save to disk.
    if task.animated_product.animation_id != "none":
        time_chunk_size = task.get_chunk_size()['time']
        time_chunk_size = 1 if time_chunk_size is None else time_chunk_size
        base_index = time_chunk_size * time_chunk_id
        for index in range(time_chunk_size):
            animated_data = []
            for chunk in total_chunks:
                geo_chunk_index = chunk[2]['geo_chunk_id']
                path = os.path.join(
                    task.get_temp_path(),
                    "animation_{}_{}.nc".format(str(geo_chunk_index),
                                                str(base_index + index)))
                if os.path.exists(path):
                    animated_data.append(xr.open_dataset(path))
            path = os.path.join(task.get_temp_path(),
                                "animation_{}.nc".format(base_index + index))
            if len(animated_data) > 0:
                export_xarray_to_netcdf(
                    combine_geographic_chunks(animated_data), path)

    path = os.path.join(task.get_temp_path(),
                        "recombined_geo_{}.nc".format(time_chunk_id))
    export_xarray_to_netcdf(combined_data, path)
    logger.info("Done combining geographic chunks for time: " +
                str(time_chunk_id))
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
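
combine_geographic_chunks is imported from the platform's utilities and is not shown here. For adjacent, non-overlapping chunks its effect can be approximated with xarray's coordinate-based combining - a sketch under that assumption, not the real implementation.

import numpy as np
import xarray as xr

def naive_combine_geographic_chunks(chunks):
    # Stitch chunks back together along their latitude/longitude coordinates.
    return xr.combine_by_coords(chunks)

west = xr.Dataset({'red': (('latitude', 'longitude'), np.ones((2, 2)))},
                  coords={'latitude': [0.0, 0.5], 'longitude': [0.0, 0.5]})
east = xr.Dataset({'red': (('latitude', 'longitude'), np.zeros((2, 2)))},
                  coords={'latitude': [0.0, 0.5], 'longitude': [1.0, 1.5]})
print(naive_combine_geographic_chunks([west, east]).red.shape)  # (2, 4)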
Example #4
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = TsmTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    times = list(
        map(_get_datetime_range_containing, time_chunk)
        if task.get_iterative() else [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    water_analysis = None
    tsm_analysis = None
    combined_data = None
    base_index = (task.get_chunk_size()['time'] if task.get_chunk_size()['time'] is not None else 1) * time_chunk_id
    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        data = dc.get_stacked_datasets_by_extent(**updated_params)

        if check_cancel_task(self, task): return

        if data is None:
            logger.info("Empty chunk.")
            continue
        if 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        clear_mask = task.satellite.get_clean_mask_func()(data)

        # Ensure data variables have the range of Landsat 7 Collection 1 Level 2
        # since the color scales are tailored for that dataset.
        platform = task.satellite.platform
        collection = task.satellite.collection
        level = task.satellite.level
        if (platform, collection) != ('LANDSAT_7', 'c1'):
            data = convert_range(data, from_platform=platform,
                                 from_collection=collection, from_level=level,
                                 to_platform='LANDSAT_7', to_collection='c1', to_level='l2')

        wofs_data = task.get_processing_method()(data,
                                                 clean_mask=clear_mask,
                                                 no_data=task.satellite.no_data_value)
        
        water_analysis = perform_timeseries_analysis(
            wofs_data, 'wofs', intermediate_product=water_analysis, no_data=task.satellite.no_data_value)

        # clear_mask.data[(data.swir2.values > 100) | (wofs_data.wofs.values == 0)] = False
        tsm_data = tsm(data, clean_mask=clear_mask, no_data=task.satellite.no_data_value)
        tsm_analysis = perform_timeseries_analysis(
            tsm_data, 'tsm', intermediate_product=tsm_analysis, no_data=task.satellite.no_data_value)

        if check_cancel_task(self, task): return

        combined_data = tsm_analysis
        combined_data['wofs'] = water_analysis.total_data
        combined_data['wofs_total_clean'] = water_analysis.total_clean

        metadata = task.metadata_from_dataset(metadata, tsm_data, clear_mask, updated_params)
        if task.animated_product.animation_id != "none":
            path = os.path.join(task.get_temp_path(),
                                "animation_{}_{}.nc".format(str(geo_chunk_id), str(base_index + time_index)))
            animated_data = tsm_data.isel(
                time=0, drop=True) if task.animated_product.animation_id == "scene" else combined_data
            export_xarray_to_netcdf(animated_data, path)

        task.scenes_processed = F('scenes_processed') + 1
        task.save(update_fields=['scenes_processed'])
    if combined_data is None:
        return None
    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(combined_data, path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
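
perform_timeseries_analysis is imported from the platform's utilities. The running totals it appears to maintain (total_data, total_clean, normalized_data) can be sketched as follows - a toy stand-in under that assumption, not the real signature.

import numpy as np
import xarray as xr

def toy_timeseries_analysis(data, var, intermediate_product=None, no_data=-9999):
    # Accumulate per-pixel sums and clean-observation counts over the time axis.
    clean = data[var] != no_data
    totals = xr.Dataset({'total_data': data[var].where(clean, 0).sum(dim='time'),
                         'total_clean': clean.sum(dim='time')})
    if intermediate_product is not None:
        totals['total_data'] = totals.total_data + intermediate_product.total_data
        totals['total_clean'] = totals.total_clean + intermediate_product.total_clean
    totals['normalized_data'] = totals.total_data / totals.total_clean
    return totals

ds = xr.Dataset({'tsm': (('time', 'y', 'x'), np.array([[[10.]], [[-9999.]], [[20.]]]))})
print(toy_timeseries_analysis(ds, 'tsm').normalized_data.values)  # [[15.]]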
Example #5
def recombine_time_chunks(self, chunks, task_id=None):
    """Recombine processed chunks over the time index.

    Open time-chunked processed datasets and recombine them using the same function
    that was used to process them. This assumes an iterative algorithm; if the
    algorithm is not iterative, the data is simply returned unchanged.

    Args:
        chunks: list of the return from the processing_task function - path, metadata, and {chunk ids}

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    task = TsmTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    # Sort by path so chunks are ordered by time id - earlier chunks first, since ids increment (e.g. 0, 1, 2, ...).
    chunks = chunks if isinstance(chunks, list) else [chunks]
    chunks = [chunk for chunk in chunks if chunk is not None]
    if len(chunks) == 0:
        return None
    total_chunks = sorted(chunks, key=lambda x: x[0])
    geo_chunk_id = total_chunks[0][2]['geo_chunk_id']
    time_chunk_id = total_chunks[0][2]['time_chunk_id']
    metadata = {}

    def combine_intermediates(dataset, dataset_intermediate):
        """
        functions used to combine time sliced data after being combined geographically.
        This compounds the results of the time slice and recomputes the normalized data.
        """
        # total data/clean refers to tsm
        dataset_intermediate['total_data'] += dataset.total_data
        dataset_intermediate['total_clean'] += dataset.total_clean
        dataset_intermediate['normalized_data'] = dataset_intermediate['total_data'] / dataset_intermediate[
            'total_clean']
        dataset_intermediate['min'] = xr.concat(
            [dataset_intermediate['min'], dataset['min']], dim='time').min(
                dim='time', skipna=True)
        dataset_intermediate['max'] = xr.concat(
            [dataset_intermediate['max'], dataset['max']], dim='time').max(
                dim='time', skipna=True)
        dataset_intermediate['wofs'] += dataset.wofs
        dataset_intermediate['wofs_total_clean'] += dataset.wofs_total_clean

    def generate_animation(index, combined_data):
        time_chunk_size = task.get_chunk_size()['time']
        time_chunk_size = 1 if time_chunk_size is None else time_chunk_size
        base_index = time_chunk_size * index
        for index in range(time_chunk_size):
            path = os.path.join(task.get_temp_path(), "animation_{}.nc".format(base_index + index))
            if os.path.exists(path):
                animated_data = xr.open_dataset(path)
                if task.animated_product.animation_id != "scene" and combined_data:
                    combine_intermediates(combined_data, animated_data)
                # need to wait until last step to mask out wofs < 0.8
                path = os.path.join(task.get_temp_path(), "animation_final_{}.nc".format(base_index + index))
                export_xarray_to_netcdf(animated_data, path)

    combined_data = None
    for index, chunk in enumerate(total_chunks):
        metadata.update(chunk[1])
        data = xr.open_dataset(chunk[0])
        if combined_data is None:
            if task.animated_product.animation_id != "none":
                generate_animation(index, combined_data)
            combined_data = data
            continue
        combine_intermediates(data, combined_data)
        if check_cancel_task(self, task): return
        # if we're animating, combine it all and save to disk.
        if task.animated_product.animation_id != "none":
            generate_animation(index, combined_data)

    path = os.path.join(task.get_temp_path(), "recombined_time_{}.nc".format(geo_chunk_id))
    export_xarray_to_netcdf(combined_data, path)
    logger.info("Done combining time chunks for geo: " + str(geo_chunk_id))
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
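
The min/max merge inside combine_intermediates leans on a small xarray idiom: concatenate the two fields along a throwaway time dimension and immediately reduce over it, which handles NaNs cleanly. In isolation:

import numpy as np
import xarray as xr

a = xr.DataArray(np.array([[1.0, np.nan], [5.0, 2.0]]), dims=('y', 'x'))
b = xr.DataArray(np.array([[3.0, 4.0], [np.nan, 0.0]]), dims=('y', 'x'))
# Stack both snapshots on a temporary 'time' axis, then reduce it away.
running_min = xr.concat([a, b], dim='time').min(dim='time', skipna=True)
running_max = xr.concat([a, b], dim='time').max(dim='time', skipna=True)
print(running_min.values)  # [[1. 4.] [5. 0.]]
print(running_max.values)  # [[3. 4.] [5. 2.]]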
Example #6
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Computes a single SLIP baseline comparison - returns a slip mask and mosaic.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = SlipTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1),
                max(time_ranges) + timedelta(microseconds=1))

    time_range = _get_datetime_range_containing(time_chunk[0], time_chunk[-1])
    dc = DataAccessApi(config=task.config_path)
    updated_params = {**parameters}
    updated_params.update(geographic_chunk)
    updated_params.update({'time': time_range})
    data = dc.get_dataset_by_extent(**updated_params)

    # Grab DEM data as well.
    dem_parameters = {**updated_params}
    dem_parameters.update({
        'product': 'terra_aster_gdm_' + task.area_id,
        'platform': 'TERRA'
    })
    dem_parameters.pop('time')
    dem_parameters.pop('measurements')
    dem_data = dc.get_dataset_by_extent(**dem_parameters)

    if 'time' not in data or 'time' not in dem_data:
        return None

    # Target data is the most recent acquisition; the baseline is everything else.
    target_data = xr.concat([data.isel(time=-1)], 'time')
    baseline_data = data.isel(time=slice(None, -1))

    target_clear_mask = task.satellite.get_clean_mask_func()(target_data)
    baseline_clear_mask = task.satellite.get_clean_mask_func()(baseline_data)
    combined_baseline = task.get_processing_method()(
        baseline_data,
        clean_mask=baseline_clear_mask,
        no_data=task.satellite.no_data_value,
        reverse_time=task.get_reverse_time())

    if check_cancel_task(self, task): return

    target_data = create_mosaic(target_data,
                                clean_mask=target_clear_mask,
                                no_data=task.satellite.no_data_value,
                                reverse_time=task.get_reverse_time())

    if check_cancel_task(self, task): return

    slip_data = compute_slip(combined_baseline,
                             target_data,
                             dem_data,
                             no_data=task.satellite.no_data_value)
    target_data['slip'] = slip_data

    metadata = task.metadata_from_dataset(
        metadata,
        target_data,
        target_clear_mask,
        updated_params,
        time=data.time.values.astype('M8[ms]').tolist()[-1])

    if check_cancel_task(self, task): return

    task.scenes_processed = F('scenes_processed') + 1
    task.save(update_fields=['scenes_processed'])

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    clear_attrs(target_data)
    export_xarray_to_netcdf(target_data, path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
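
_get_datetime_range_containing simply pads the min/max of its arguments by a microsecond so that inclusive range queries catch the endpoint acquisitions; it is small enough to check directly:

from datetime import datetime, timedelta

def _get_datetime_range_containing(*time_ranges):
    return (min(time_ranges) - timedelta(microseconds=1),
            max(time_ranges) + timedelta(microseconds=1))

start, end = _get_datetime_range_containing(datetime(2018, 1, 1), datetime(2018, 6, 1))
print(start)  # 2017-12-31 23:59:59.999999
print(end)    # 2018-06-01 00:00:00.000001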
Example #7
def create_output_products(self, data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update the status and exit.

    Args:
        data: tuple from the processing_task function - composite and mask paths, metadata, and {chunk ids}

    """
    if data is None: return None

    task = SpectralAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    spectral_index = task.query_type.result_id

    full_metadata = data[3]
    # This is the difference (or "change") composite.
    diff_composite = xr.open_dataset(data[0])
    # This indicates where either the baseline or analysis composite
    # was outside the corresponding user-specified range.
    orig_composite_out_of_range = xr.open_dataset(data[1])[spectral_index].astype(bool).values
    # This indicates where either the baseline or analysis composite
    # was the no_data value.
    composite_no_data = xr.open_dataset(data[2])[spectral_index].astype(bool).values

    # Obtain a NumPy array of the data to create a plot later.
    if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
        diff_comp_np_arr = diff_composite[spectral_index].values
    else:  # Fractional Cover
        diff_comp_np_arr = diff_composite['pv'].values
    diff_comp_np_arr[composite_no_data] = np.nan

    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.result_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.final_metadata_from_dataset(diff_composite)
    task.metadata_from_dict(full_metadata)

    # 1. Prepare to save the spectral index net change as a GeoTIFF and NetCDF.
    if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
        bands = [spectral_index]
    else:  # Fractional Coverage
        bands = ['bs', 'pv', 'npv']
    # 2. Prepare to create a PNG of the spectral index change composite.
    # 2.1. Find the min and max possible difference for the selected spectral index.
    spec_ind_min, spec_ind_max = spectral_indices_range_map[spectral_index]
    diff_min_possible, diff_max_possible = spec_ind_min - spec_ind_max, spec_ind_max - spec_ind_min
    # 2.2. Scale the difference composite to the range [0, 1] for plotting.
    image_data = np.interp(diff_comp_np_arr,
                           (diff_min_possible, diff_max_possible), (0, 1))
    # 2.3. Color by region.
    # 2.3.1. First, color by change.
    # If the user specified a change value range, the product is binary -
    # denoting which pixels fall within the net change threshold.
    cng_min, cng_max = task.change_threshold_min, task.change_threshold_max
    if cng_min is not None and cng_max is not None:
        image_data = np.empty((*image_data.shape, 4), dtype=image_data.dtype)
        image_data[:, :] = mpl.colors.to_rgba('red')
    else:  # otherwise, use a red-green gradient.
        cmap = plt.get_cmap('RdYlGn')
        image_data = cmap(image_data)
    # 2.3.2. Second, color regions in which the change was outside
    #        the optional user-specified change value range.
    change_out_of_range_color = mpl.colors.to_rgba('black')
    if cng_min is not None and cng_max is not None:
        diff_composite_out_of_range = (diff_comp_np_arr <
                                       cng_min) | (cng_max < diff_comp_np_arr)
        image_data[diff_composite_out_of_range] = change_out_of_range_color
    # 2.3.3. Third, color regions in which either the baseline or analysis
    #        composite was outside the user-specified composite value range.
    composite_out_of_range_color = mpl.colors.to_rgba('white')
    image_data[orig_composite_out_of_range] = composite_out_of_range_color
    #  2.3.4. Fourth, color regions in which either the baseline or analysis
    #         composite was the no_data value as transparent.
    composite_no_data_color = np.array([0., 0., 0., 0.])
    image_data[composite_no_data] = composite_no_data_color

    # Create output products (NetCDF, GeoTIFF, PNG).
    export_xarray_to_netcdf(diff_composite, task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path,
                          diff_composite.astype('float32'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    plt.imsave(task.result_path, image_data)

    # Plot metadata.
    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    return True
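
The layered coloring above follows a common matplotlib pattern: map a normalized array through a colormap to get an RGBA image, then overwrite masked regions with fixed colors, painting the highest-priority mask last. A condensed, runnable sketch with synthetic data in place of the task's composites:

import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np

values = np.random.rand(8, 8)            # stands in for the scaled difference composite
out_of_range = values > 0.9              # stands in for a user-range mask
no_data = np.zeros((8, 8), dtype=bool)
no_data[0, 0] = True

image_data = plt.get_cmap('RdYlGn')(values)             # (8, 8, 4) RGBA array
image_data[out_of_range] = mpl.colors.to_rgba('black')  # mask color overrides the gradient
image_data[no_data] = np.array([0., 0., 0., 0.])        # no-data painted transparent last
plt.imsave('sketch.png', image_data)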
Example #8
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = CustomMosaicToolTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    iteration_data = None
    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1), max(time_ranges) + timedelta(microseconds=1))

    times = list(
        map(_get_datetime_range_containing, time_chunk)
        if task.get_iterative() else [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    base_index = (task.get_chunk_size()['time'] if task.get_chunk_size()['time'] is not None else 1) * time_chunk_id
    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        data = dc.get_stacked_datasets_by_extent(**updated_params)
        if check_cancel_task(self, task): return

        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        # clear_mask = task.satellite.get_clean_mask_func()(data)
        clear_mask = np.full(data[list(data.data_vars)[0]].shape, True)
        add_timestamp_data_to_xr(data)
        metadata = task.metadata_from_dataset(metadata, data, clear_mask, updated_params)

        iteration_data = task.get_processing_method()(data,
                                                      clean_mask=clear_mask,
                                                      intermediate_product=iteration_data,
                                                      no_data=task.satellite.no_data_value,
                                                      reverse_time=task.get_reverse_time())

        if check_cancel_task(self, task): return

        if task.animated_product.animation_id != "none":
            path = os.path.join(task.get_temp_path(),
                                "animation_{}_{}.nc".format(str(geo_chunk_id), str(base_index + time_index)))
            if task.animated_product.animation_id == "scene":
                # Need to clear out all the metadata.
                clear_attrs(data)
                # Can't reindex on time, so select the slice and drop the scalar coordinate.
                export_xarray_to_netcdf(data.isel(time=0).drop('time'), path)
            elif task.animated_product.animation_id == "cumulative":
                export_xarray_to_netcdf(iteration_data, path)

        task.scenes_processed = F('scenes_processed') + 1
        # Avoid overwriting the task's status if it is cancelled.
        task.save(update_fields=['scenes_processed'])

    if iteration_data is None:
        return None
    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(iteration_data, path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
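
The "can't reindex on time" workaround in the scene branch above works because isel(time=0) leaves a scalar time coordinate behind, and .drop('time') removes it so a flat 2D dataset gets exported. In isolation:

import numpy as np
import xarray as xr

ds = xr.Dataset({'red': (('time', 'y', 'x'), np.zeros((1, 2, 2)))},
                coords={'time': [np.datetime64('2018-01-01')]})
flat = ds.isel(time=0).drop('time')  # select the slice, then drop the scalar coord
print('time' in ds.isel(time=0).coords, 'time' in flat.coords)  # True False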
Example #9
def create_output_products(data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update the status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    logger.info("CREATE_OUTPUT")
    full_metadata = data[1]
    dataset = xr.open_dataset(data[0])
    task = BandMathTask.objects.get(pk=task_id)

    task.result_path = os.path.join(task.get_result_path(), "band_math.png")
    task.mosaic_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = task.satellite.get_measurements() + ['band_math']

    export_xarray_to_netcdf(dataset, task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path,
                          dataset.astype('int32'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    write_png_from_xr(task.mosaic_path,
                      dataset,
                      bands=['red', 'green', 'blue'],
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)
    write_single_band_png_from_xr(task.result_path,
                                  dataset,
                                  band='band_math',
                                  color_scale=task.color_scale_path,
                                  no_data=task.satellite.no_data_value)

    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    shutil.rmtree(task.get_temp_path())
    return True
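
The 'band_math' variable written out here was computed upstream and is not defined in this snippet. As an illustration only, a typical band-math product is an NDVI-style normalized ratio - a hypothetical example, not necessarily this task's actual expression:

import numpy as np
import xarray as xr

ds = xr.Dataset({'red': (('y', 'x'), np.array([[600., 400.]])),
                 'nir': (('y', 'x'), np.array([[3000., 2000.]]))})
# A normalized difference of two bands, stored alongside the source measurements.
ds['band_math'] = (ds.nir - ds.red) / (ds.nir + ds.red)
print(ds.band_math.values)  # [[0.66666667 0.66666667]]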
Example #10
def create_output_products(self, data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update the status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}
    """
    task = CoastalChangeTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    full_metadata = data[1]
    dataset = xr.open_dataset(data[0])

    task.result_path = os.path.join(task.get_result_path(),
                                    "coastline_change.png")
    task.result_coastal_change_path = os.path.join(task.get_result_path(),
                                                   "coastal_change.png")
    task.result_mosaic_path = os.path.join(task.get_result_path(),
                                           "mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.animation_path = (os.path.join(task.get_result_path(), "animation.gif")
                           if task.animated_product.animation_id != 'none' else "")
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = task.satellite.get_measurements() + [
        'coastal_change', 'coastline_old', 'coastline_new'
    ]
    png_bands = ['red', 'green', 'blue']

    export_xarray_to_netcdf(dataset, task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path,
                          dataset.astype('int32'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    write_png_from_xr(task.result_path,
                      mask_mosaic_with_coastlines(dataset),
                      bands=png_bands,
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)
    write_png_from_xr(task.result_coastal_change_path,
                      mask_mosaic_with_coastal_change(dataset),
                      bands=png_bands,
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)
    write_png_from_xr(task.result_mosaic_path,
                      dataset,
                      bands=png_bands,
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)

    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I',
                                duration=1.0) as writer:
            for index in range(task.time_end - task.time_start):
                path = os.path.join(task.get_temp_path(),
                                    "animation_{}.png".format(index))
                if os.path.exists(path):
                    image = imageio.imread(path)
                    writer.append_data(image)

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    return True
Example #11
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = NdviAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    base_scene_time_range = parameters['time']

    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)

    # Generate the baseline data - one time slice at a time
    full_dataset = []
    for time_index, time in enumerate(time_chunk):
        updated_params.update({'time': _get_datetime_range_containing(time)})
        data = dc.get_dataset_by_extent(**updated_params)

        if check_cancel_task(self, task): return

        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue
        full_dataset.append(data.copy(deep=True))

    # load selected scene and mosaic just in case we got two scenes (handles scene boundaries/overlapping data)
    updated_params.update({'time': base_scene_time_range})
    selected_scene = dc.get_dataset_by_extent(**updated_params)

    if check_cancel_task(self, task): return

    if len(full_dataset) == 0 or 'time' not in selected_scene:
        return None

    # Concat individual slices over time, compute metadata + mosaic.
    baseline_data = xr.concat(full_dataset, 'time')
    baseline_clear_mask = task.satellite.get_clean_mask_func()(baseline_data)
    metadata = task.metadata_from_dataset(metadata, baseline_data, baseline_clear_mask, parameters)

    selected_scene_clear_mask = task.satellite.get_clean_mask_func()(selected_scene)
    metadata = task.metadata_from_dataset(metadata, selected_scene, selected_scene_clear_mask, parameters)
    selected_scene = task.get_processing_method()(selected_scene,
                                                  clean_mask=selected_scene_clear_mask,
                                                  intermediate_product=None,
                                                  no_data=task.satellite.no_data_value)
    # We need to regenerate the clean mask using the mosaic now.
    selected_scene_clear_mask = task.satellite.get_clean_mask_func()(selected_scene)

    if check_cancel_task(self, task): return

    ndvi_products = compute_ndvi_anomaly(
        baseline_data,
        selected_scene,
        baseline_clear_mask=baseline_clear_mask,
        selected_scene_clear_mask=selected_scene_clear_mask,
        no_data=task.satellite.no_data_value)
    full_product = xr.merge([ndvi_products, selected_scene])

    task.scenes_processed = F('scenes_processed') + 1
    task.save(update_fields=['scenes_processed'])

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(full_product, path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
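
compute_ndvi_anomaly is imported from the platform's utilities. Its core comparison - the selected scene's NDVI against a baseline composite's NDVI - can be sketched like this (a toy under that assumption, not the real function):

import numpy as np
import xarray as xr

def toy_ndvi(ds):
    return (ds.nir - ds.red) / (ds.nir + ds.red)

baseline = xr.Dataset({'red': (('time', 'y', 'x'), np.full((3, 1, 1), 500.)),
                       'nir': (('time', 'y', 'x'), np.full((3, 1, 1), 3000.))})
scene = xr.Dataset({'red': (('y', 'x'), np.array([[800.]])),
                    'nir': (('y', 'x'), np.array([[1200.]]))})
anomaly = toy_ndvi(scene) - toy_ndvi(baseline).mean(dim='time')
print(float(anomaly))  # negative: the scene is less vegetated than its baseline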
Example #12
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = CoastalChangeTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1),
                max(time_ranges) + timedelta(microseconds=1))

    starting_year = _get_datetime_range_containing(*time_chunk[0])
    comparison_year = _get_datetime_range_containing(*time_chunk[1])

    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)

    def _compute_mosaic(time):
        """
        Loads data for some time range for the current geographic chunk,
        returning 3 objects - the mosaic, the task metadata, and the number of
        acquisitions that were in the retrieved data.
        """
        updated_params.update({'time': time})
        data = dc.get_dataset_by_extent(**updated_params)
        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            return None, None, None

        clear_mask = task.satellite.get_clean_mask_func()(data)
        metadata = task.metadata_from_dataset({}, data, clear_mask,
                                              updated_params)
        return task.get_processing_method()(data, clean_mask=clear_mask, no_data=task.satellite.no_data_value), \
               metadata, len(data['time'])

    if check_cancel_task(self, task): return
    old_mosaic, old_metadata, num_scenes_old = _compute_mosaic(starting_year)
    if old_mosaic is None: return None
    task.scenes_processed = F('scenes_processed') + num_scenes_old
    # Avoid overwriting the task's status if it is cancelled.
    task.save(update_fields=['scenes_processed'])

    if check_cancel_task(self, task): return
    new_mosaic, new_metadata, num_scenes_new = _compute_mosaic(comparison_year)
    if new_mosaic is None: return None
    task.scenes_processed = F('scenes_processed') + num_scenes_new
    task.save(update_fields=['scenes_processed'])

    if check_cancel_task(self, task): return

    metadata = {**old_metadata, **new_metadata}

    output_product = compute_coastal_change(
        old_mosaic, new_mosaic, no_data=task.satellite.no_data_value)

    if check_cancel_task(self, task): return

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(output_product, path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
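
compute_coastal_change is an external helper; conceptually it compares the water classifications of the two mosaics, which boils down to boolean mask algebra. A sketch of the idea, not the real implementation:

import numpy as np

old_water = np.array([[True, True], [False, False]])
new_water = np.array([[True, False], [True, False]])
land_to_water = ~old_water & new_water  # newly inundated pixels
water_to_land = old_water & ~new_water  # newly exposed land
print(land_to_water.sum(), water_to_land.sum())  # 1 1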
Example #13
def recombine_time_chunks(self, chunks, task_id=None, num_scn_per_chk=None):
    """Recombine processed chunks over the time index.

    Open time-chunked processed datasets and recombine them using the same function
    that was used to process them. This assumes an iterative algorithm; if the
    algorithm is not iterative, the data is simply returned unchanged.

    Args:
        chunks: list of the return from the processing_task function - path, metadata, and {chunk ids}
        num_scn_per_chk: The number of scenes per chunk. Used to determine task progress.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    task = FractionalCoverTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    # Sort by path so chunks are ordered by time id - earlier chunks first, since ids increment (e.g. 0, 1, 2, ...).
    chunks = chunks if isinstance(chunks, list) else [chunks]
    chunks = [chunk for chunk in chunks if chunk is not None]
    if len(chunks) == 0:
        return None
    total_chunks = sorted(chunks, key=lambda x: x[0])
    geo_chunk_id = total_chunks[0][2]['geo_chunk_id']
    time_chunk_id = total_chunks[0][2]['time_chunk_id']

    metadata = {}
    combined_data = None
    for index, chunk in enumerate(total_chunks):
        metadata.update(chunk[1])
        data = xr.open_dataset(chunk[0])
        if combined_data is None:
            combined_data = data
            task.scenes_processed = F('scenes_processed') + num_scn_per_chk
            task.save(update_fields=['scenes_processed'])
            continue
        # Give time an index to keep mosaicking from breaking.
        data = xr.concat([data], 'time')
        data['time'] = [0]
        clear_mask = task.satellite.get_clean_mask_func()(data)

        combined_data = task.get_processing_method()(
            data,
            clean_mask=clear_mask,
            intermediate_product=combined_data,
            no_data=task.satellite.no_data_value,
            reverse_time=task.get_reverse_time())
        if check_cancel_task(self, task): return
        task.scenes_processed = F('scenes_processed') + num_scn_per_chk
        task.save(update_fields=['scenes_processed'])
    if combined_data is None:
        return None

    path = os.path.join(task.get_temp_path(),
                        "recombined_time_{}.nc".format(geo_chunk_id))
    export_xarray_to_netcdf(combined_data, path)
    logger.info("Done combining time chunks for geo: " + str(geo_chunk_id))
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
Example #14
def recombine_time_chunks(self, chunks, task_id=None):
    """Recombine processed chunks over the time index.

    Open time-chunked processed datasets and recombine them using the same function
    that was used to process them. This assumes an iterative algorithm; if the
    algorithm is not iterative, the data is simply returned unchanged.

    Args:
        chunks: list of the return from the processing_task function - path, metadata, and {chunk ids}

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    task = SlipTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    # Sort by path so chunks are ordered by time id - earlier chunks first, since ids increment (e.g. 0, 1, 2, ...).
    chunks = chunks if isinstance(chunks, list) else [chunks]
    chunks = [chunk for chunk in chunks if chunk is not None]
    if len(chunks) == 0:
        return None
    total_chunks = sorted(chunks, key=lambda x: x[0])
    geo_chunk_id = total_chunks[0][2]['geo_chunk_id']
    time_chunk_id = total_chunks[0][2]['time_chunk_id']
    metadata = {}

    combined_data = None
    combined_slip = None
    for index, chunk in enumerate(reversed(total_chunks)):
        metadata.update(chunk[1])
        data = xr.open_dataset(chunk[0])
        if combined_data is None:
            combined_data = data.drop('slip')
            # since this is going to interact with data/mosaicking, it needs a time dim
            combined_slip = xr.concat([data.slip.copy(deep=True)], 'time')
            continue
        # Give time an index to keep mosaicking from breaking.
        data = xr.concat([data], 'time')
        data['time'] = [0]
        clear_mask = task.satellite.get_clean_mask_func()(data)
        # Modify the clean mask so that only slip pixels that are still zero will be used. This shows all the pixels that caused the flag.
        clear_mask[xr.concat([combined_slip], 'time').values == 1] = False
        combined_data = create_mosaic(data.drop('slip'),
                                      clean_mask=clear_mask,
                                      intermediate_product=combined_data,
                                      no_data=task.satellite.no_data_value,
                                      reverse_time=task.get_reverse_time())
        combined_slip.values[combined_slip.values == 0] = data.slip.values[
            combined_slip.values == 0]
        if check_cancel_task(self, task): return

    # Since we added a time dim to combined_slip, we need to remove it here.
    combined_data['slip'] = combined_slip.isel(time=0, drop=True)
    path = os.path.join(task.get_temp_path(),
                        "recombined_time_{}.nc".format(geo_chunk_id))
    export_xarray_to_netcdf(combined_data, path)
    logger.info("Done combining time chunks for geo: " + str(geo_chunk_id))
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
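
The slip merge above walks chunks from newest to oldest and only fills pixels whose flag is still zero, so the most recent nonzero flag wins. The same update on plain arrays:

import numpy as np

combined_slip = np.array([0, 1, 0, 0])  # flags accumulated from newer chunks
older_chunk = np.array([1, 0, 1, 0])
# Only pixels still unflagged take the older chunk's value.
combined_slip[combined_slip == 0] = older_chunk[combined_slip == 0]
print(combined_slip)  # [1 1 1 0]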
Example #15
def create_output_products(self, data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update the status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}
    """
    task = CustomMosaicToolTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    full_metadata = data[1]
    dataset = xr.open_dataset(data[0])

    task.result_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.result_filled_path = os.path.join(task.get_result_path(),
                                           "filled_png_mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.animation_path = (os.path.join(task.get_result_path(), "animation.gif")
                           if task.animated_product.animation_id != 'none' else "")
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = task.satellite.get_measurements()
    png_bands = [
        task.query_type.red, task.query_type.green, task.query_type.blue
    ]

    export_xarray_to_netcdf(dataset, task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path,
                          dataset.astype('int32'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    write_png_from_xr(task.result_path,
                      dataset,
                      bands=png_bands,
                      png_filled_path=task.result_filled_path,
                      fill_color=task.query_type.fill,
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)

    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I',
                                duration=1.0) as writer:
            valid_range = range(len(full_metadata))
            if task.animated_product.animation_id == "scene" and task.get_reverse_time():
                valid_range = reversed(valid_range)
            for index in valid_range:
                path = os.path.join(task.get_temp_path(),
                                    "animation_{}.png".format(index))
                if os.path.exists(path):
                    image = imageio.imread(path)
                    writer.append_data(image)

    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    return True
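The animation block above uses imageio's writer interface. A standalone sketch of the same pattern - appending existing PNG frames to a GIF - where the frame and output paths are hypothetical:

import os

import imageio

frame_paths = ["animation_0.png", "animation_1.png"]  # hypothetical frames on disk
with imageio.get_writer("animation.gif", mode='I', duration=1.0) as writer:
    for path in frame_paths:
        if os.path.exists(path):  # skip frames that were never rendered
            writer.append_data(imageio.imread(path))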
Example #16
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    geographic_chunk=None,
                    num_scn_per_chk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic chunk id to identify output products.
    **parameters is updated with time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        num_scn_per_chk: A dictionary of the number of scenes per chunk for the baseline
                         and analysis extents. Used to determine task progress.
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = str(geo_chunk_id)
    task = SpectralAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    # For both the baseline and analysis time ranges for this
    # geographic chunk, load, calculate the spectral index, composite,
    # and filter the data according to user-supplied parameters -
    # recording where the data was out of the filter's range so we can
    # create the output product (an image).
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    spectral_index = task.query_type.result_id
    composites = {}
    composites_out_of_range = {}
    no_data_value = task.satellite.no_data_value
    for composite_name in ['baseline', 'analysis']:
        if check_cancel_task(self, task): return

        # Use the corresponding time range for the baseline and analysis data.
        updated_params['time'] = \
            updated_params['baseline_time' if composite_name == 'baseline' else 'analysis_time']
        time_column_data = dc.get_dataset_by_extent(**updated_params)
        # If this geographic chunk is outside the data extents, return None.
        if len(time_column_data.dims) == 0: return None

        # Obtain the clean mask for the satellite.
        time_column_clean_mask = task.satellite.get_clean_mask_func()(
            time_column_data)
        measurements_list = task.satellite.measurements.replace(" ",
                                                                "").split(",")
        # Obtain the mask for valid Landsat values.
        time_column_invalid_mask = landsat_clean_mask_invalid(
            time_column_data, platform=task.satellite.platform,
            collection=task.satellite.collection, level=task.satellite.level).values
        # Also exclude data points with the no_data value.
        no_data_mask = time_column_data[
            measurements_list[0]].values != no_data_value
        # Combine the clean masks - each mask is True where the pixel is usable,
        # so a pixel is clean only if every mask marks it as valid.
        time_column_clean_mask = time_column_clean_mask & time_column_invalid_mask & no_data_mask

        # Obtain the composite.
        composite = task.get_processing_method()(
            time_column_data,
            clean_mask=time_column_clean_mask,
            no_data=task.satellite.no_data_value)
        # Obtain the mask for valid Landsat values.
        composite_invalid_mask = landsat_clean_mask_invalid(
            composite, platform=task.satellite.platform,
            collection=task.satellite.collection, level=task.satellite.level).values
        # Also exclude data points with the no_data value via the compositing mask.
        composite_no_data_mask = composite[
            measurements_list[0]].values != no_data_value
        composite_clean_mask = composite_invalid_mask & composite_no_data_mask

        # Compute the spectral index for the composite.
        spec_ind_params = dict()
        if spectral_index == 'fractional_cover':
            spec_ind_params = dict(clean_mask=composite_clean_mask,
                                   no_data=no_data_value)
        spec_ind_result = spectral_indices_function_map[spectral_index](
            composite, **spec_ind_params)
        if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
            composite[spectral_index] = spec_ind_result
        else:  # Fractional Cover
            composite = xr.merge([composite, spec_ind_result])
            # Fractional Cover is supposed to have a range of [0, 100], with its bands -
            # 'bs', 'pv', and 'npv' - summing to 100. However, the function we use
            # can have the sum of those bands as high as 106.
            # frac_cov_min, frac_cov_max = spectral_indices_range_map[spectral_index]
            frac_cov_min, frac_cov_max = 0, 106
            for band in ['bs', 'pv', 'npv']:
                composite[band].values = \
                    np.interp(composite[band].values, (frac_cov_min, frac_cov_max),
                              spectral_indices_range_map[spectral_index])

        composites[composite_name] = composite

        # Determine where the composite is out of range.
        # We rename the resulting xarray.DataArray because calling to_netcdf()
        # on it at the end of this function will save it as a Dataset
        # with one data variable with the same name as the DataArray.
        if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
            composites_out_of_range[composite_name] = \
                xr_or(composite[spectral_index] < task.composite_threshold_min,
                      task.composite_threshold_max < composite[spectral_index]).rename(spectral_index)
        else:  # Fractional Cover
            # For fractional cover, a composite pixel is out of range if any of its
            # fractional cover bands are out of range.
            composites_out_of_range[composite_name] = xr_or(
                xr_or(
                    xr_or(composite['bs'] < task.composite_threshold_min,
                          task.composite_threshold_max < composite['bs']),
                    xr_or(composite['pv'] < task.composite_threshold_min,
                          task.composite_threshold_max < composite['pv'])),
                xr_or(composite['npv'] < task.composite_threshold_min,
                      task.composite_threshold_max <
                      composite['npv'])).rename(spectral_index)

        # Update the metadata with the current data (baseline or analysis).
        metadata = task.metadata_from_dataset(metadata, time_column_data,
                                              time_column_clean_mask,
                                              parameters)
        # Record task progress (baseline or analysis composite data obtained).
        task.scenes_processed = F(
            'scenes_processed') + num_scn_per_chk[composite_name]
        task.save(update_fields=['scenes_processed'])
    dc.close()

    if check_cancel_task(self, task): return
    # Create a difference composite.
    diff_composite = composites['analysis'] - composites['baseline']
    # Find where either the baseline or analysis composite was out of range for a pixel.
    composite_out_of_range = xr_or(*composites_out_of_range.values())
    # Find where either the baseline or analysis composite was no_data.
    if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
        composite_no_data = xr_or(
            composites['baseline'][measurements_list[0]] == no_data_value,
            composites['analysis'][measurements_list[0]] == no_data_value)
        if spectral_index == 'evi':  # EVI returns no_data for values outside [-1,1].
            composite_no_data = xr_or(
                composite_no_data,
                xr_or(composites['baseline'][spectral_index] == no_data_value,
                      composites['analysis'][spectral_index] == no_data_value))
    else:  # Fractional Cover
        composite_no_data = xr_or(
            xr_or(
                xr_or(composites['baseline']['bs'] == no_data_value,
                      composites['baseline']['pv'] == no_data_value),
                composites['baseline']['npv'] == no_data_value),
            xr_or(
                xr_or(composites['analysis']['bs'] == no_data_value,
                      composites['analysis']['pv'] == no_data_value),
                composites['analysis']['npv'] == no_data_value))
    composite_no_data = composite_no_data.rename(spectral_index)

    # Drop unneeded data variables.
    diff_composite = diff_composite.drop(measurements_list)

    if check_cancel_task(self, task): return

    composite_path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(diff_composite, composite_path)
    composite_out_of_range_path = os.path.join(task.get_temp_path(),
                                               chunk_id + "_out_of_range.nc")
    logger.info("composite_out_of_range:" + str(composite_out_of_range))
    export_xarray_to_netcdf(composite_out_of_range,
                            composite_out_of_range_path)
    composite_no_data_path = os.path.join(task.get_temp_path(),
                                          chunk_id + "_no_data.nc")
    export_xarray_to_netcdf(composite_no_data, composite_no_data_path)
    return composite_path, composite_out_of_range_path, composite_no_data_path, \
           metadata, {'geo_chunk_id': geo_chunk_id}
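The fractional cover branch above rescales band values from the function's observed output range into the documented [0, 100] range with numpy.interp. A minimal sketch of that linear mapping, using made-up band values:

import numpy as np

frac_cov_min, frac_cov_max = 0, 106  # observed output range of the function
band_values = np.array([0.0, 53.0, 106.0])
rescaled = np.interp(band_values, (frac_cov_min, frac_cov_max), (0, 100))
print(rescaled)  # [  0.  50. 100.]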
Example #17
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk id to identify output products.
    **parameters is updated with time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = UrbanizationTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1),
                max(time_ranges) + timedelta(microseconds=1))

    times = list(
        map(_get_datetime_range_containing, time_chunk)
        if task.get_iterative() else
        [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    iteration_data = None
    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        data = dc.get_dataset_by_extent(**updated_params)

        if check_cancel_task(self, task): return

        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        clear_mask = task.satellite.get_clean_mask_func()(data)
        add_timestamp_data_to_xr(data)

        metadata = task.metadata_from_dataset(metadata, data, clear_mask,
                                              updated_params)

        iteration_data = task.get_processing_method()(
            data,
            clean_mask=clear_mask,
            intermediate_product=iteration_data,
            no_data=task.satellite.no_data_value,
            reverse_time=task.get_reverse_time())

        if check_cancel_task(self, task): return

        task.scenes_processed = F('scenes_processed') + 1
        task.save(update_fields=['scenes_processed'])
    if iteration_data is None:
        return None
    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(iteration_data, path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
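The times list above either wraps each acquisition in its own microsecond-padded range (iterative algorithms) or builds a single range spanning the whole chunk. A sketch of _get_datetime_range_containing on hypothetical acquisition dates:

from datetime import datetime, timedelta

def _get_datetime_range_containing(*time_ranges):
    return (min(time_ranges) - timedelta(microseconds=1),
            max(time_ranges) + timedelta(microseconds=1))

acquisitions = [datetime(2000, 1, 1), datetime(2000, 2, 1)]  # hypothetical dates
per_scene = [_get_datetime_range_containing(t) for t in acquisitions]  # iterative
whole_chunk = _get_datetime_range_containing(acquisitions[0], acquisitions[-1])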
Example #18
def recombine_time_chunks(self, chunks, task_id=None):
    """Recombine processed chunks over the time index.

    Open time chunked processed datasets and recombine them using the same function
    that was used to process them. This assumes an iterative algorithm - if it is not, then it will
    simply return the data again.

    Args:
        chunks: list of the return from the processing_task function - path, metadata, and {chunk ids}

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    task = WaterDetectionTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    # sorting based on time id - earlier processed first as they're incremented e.g. 0, 1, 2..
    chunks = chunks if isinstance(chunks, list) else [chunks]
    chunks = [chunk for chunk in chunks if chunk is not None]
    if len(chunks) == 0:
        return None
    total_chunks = sorted(chunks, key=lambda x: x[0])
    geo_chunk_id = total_chunks[0][2]['geo_chunk_id']
    time_chunk_id = total_chunks[0][2]['time_chunk_id']

    def combine_intermediates(dataset, dataset_intermediate):
        """
        functions used to combine time sliced data after being combined geographically.
        This compounds the results of the time slice and recomputes the normalized data.
        """
        dataset_intermediate['total_data'] += dataset.total_data
        dataset_intermediate['total_clean'] += dataset.total_clean
        dataset_intermediate['normalized_data'] = dataset_intermediate[
            'total_data'] / dataset_intermediate['total_clean']

    def generate_animation(index, combined_data):
        base_index = (task.get_chunk_size()['time'] if
                      task.get_chunk_size()['time'] is not None else 1) * index
        for index in range((task.get_chunk_size()['time'] if
                            task.get_chunk_size()['time'] is not None else 1)):
            path = os.path.join(task.get_temp_path(),
                                "animation_{}.nc".format(base_index + index))
            if os.path.exists(path):
                animated_data = xr.open_dataset(path)
                if task.animated_product.animation_id != "scene" and combined_data:
                    combine_intermediates(combined_data, animated_data)
                path = os.path.join(
                    task.get_temp_path(),
                    "animation_{}.png".format(base_index + index))

                write_single_band_png_from_xr(
                    path,
                    animated_data,
                    task.animated_product.data_variable,
                    color_scale=task.color_scales[
                        task.animated_product.data_variable],
                    fill_color=task.query_type.fill,
                    interpolate=False,
                    no_data=task.satellite.no_data_value)

    metadata = {}
    combined_data = None
    for index, chunk in enumerate(total_chunks):
        metadata.update(chunk[1])
        data = xr.open_dataset(chunk[0])
        if combined_data is None:
            if task.animated_product.animation_id != "none":
                generate_animation(index, combined_data)
            combined_data = data
            continue
        combine_intermediates(data, combined_data)
        # if we're animating, combine it all and save to disk.
        if task.animated_product.animation_id != "none":
            generate_animation(index, combined_data)

    path = os.path.join(task.get_temp_path(),
                        "recombined_time_{}.nc".format(geo_chunk_id))
    export_xarray_to_netcdf(combined_data, path)
    logger.info("Done combining time chunks for geo: " + str(geo_chunk_id))
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
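combine_intermediates above accumulates per-chunk counts and recomputes the normalized ratio. A minimal sketch of the same arithmetic on toy xarray datasets - the variable names match the code above, but the values are made up:

import numpy as np
import xarray as xr

def make_chunk(total_data, total_clean):
    return xr.Dataset({
        'total_data': (('y', 'x'), np.array(total_data, dtype=float)),
        'total_clean': (('y', 'x'), np.array(total_clean, dtype=float)),
    })

combined = make_chunk([[1.0]], [[2.0]])
new_chunk = make_chunk([[3.0]], [[2.0]])

# Accumulate counts, then renormalize - mirroring combine_intermediates.
combined['total_data'] += new_chunk.total_data
combined['total_clean'] += new_chunk.total_clean
combined['normalized_data'] = combined['total_data'] / combined['total_clean']
print(float(combined['normalized_data']))  # (1 + 3) / (2 + 2) = 1.0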