Example #1
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None
    """
    task = FractionalCoverTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)

    #validate for any number of criteria here - num acquisitions, etc.
    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if not task.compositor.is_iterative() and (task.time_end -
                                               task.time_start).days > 367:
        task.complete = True
        task.update_status(
            "ERROR",
            "Median pixel operations are only supported for single year time periods."
        )
        return None

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['products'][0],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
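
A note on the check_cancel_task helper used above: it comes from the Data Cube UI task utilities and is not shown in these examples. The following is a minimal sketch of the shape such a helper might take, assuming a "CANCELLED" status value and Celery's bound-task API; it is illustrative, not the verified implementation.

def check_cancel_task(celery_task, task):
    """Return True if the task model was cancelled, aborting the rest of the chain."""
    task.refresh_from_db()  # pick up status changes made from the web UI
    if task.status == "CANCELLED":  # assumed status string
        # Dropping the remaining chain prevents downstream links from running.
        celery_task.request.chain = None
        return True
    return False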
Example #2
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = AppNameTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    #validate for any number of criteria here - num acquisitions, etc.
    # TODO: if this is not a multisensor app, replace list_combined_acquisition_dates with list_acquisition_dates
    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    # TODO: are there any additional validations that need to be done here?
    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if task.animated_product.animation_id != "none" and not task.compositor.is_iterative(
    ):
        task.complete = True
        task.update_status(
            "ERROR",
            "Animations cannot be generated for median pixel operations.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    # TODO: Check that the measurements exist - replace ['products'][0] with ['products'] if this is not a multisensor app.
    if not dc.validate_measurements(parameters['products'][0],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #3
def parse_parameters_from_task(self, task_id=None):
    """Parse out required DC parameters from the task model.

    See the DataAccessApi docstrings for more information.
    Parses out platforms, products, etc. to be used with DataAccessApi calls.

    If this is a multisensor app, platform and product should be pluralized and used
    with the get_stacked_datasets_by_extent call rather than the normal get.

    Returns:
        parameter dict with all keyword args required to load data.

    """
    task = SpectralAnomalyTask.objects.get(pk=task_id)

    parameters = {
        'platform': task.satellite.datacube_platform,
        'product': task.satellite.get_products(task.area_id)[0],
        'time': (task.time_start, task.time_end),
        'baseline_time': (task.baseline_time_start, task.baseline_time_end),
        'analysis_time': (task.analysis_time_start, task.analysis_time_end),
        'longitude': (task.longitude_min, task.longitude_max),
        'latitude': (task.latitude_min, task.latitude_max),
        'measurements': task.satellite.get_measurements(),
        'composite_range': (task.composite_threshold_min, task.composite_threshold_max),
        'change_range': (task.change_threshold_min, task.change_threshold_max),
    }

    task.execution_start = datetime.now()
    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Parsed out parameters.")

    return parameters
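
Several keys in the dict above (baseline_time, analysis_time, composite_range, change_range) are app-level settings rather than Data Cube load arguments; Example #20 below deletes them before calling load. A hedged sketch of that filtering step, assuming the parameters dict built by this function:

load_params = {
    key: value
    for key, value in parameters.items()
    if key not in ('baseline_time', 'analysis_time', 'composite_range', 'change_range')
}
# load_params now contains only keyword arguments a datacube load call accepts.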
Example #4
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = TsmTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_stacked_datasets_by_extent(**parameters)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel.isel(latitude=0, longitude=0))
    single_pixel = single_pixel.where(single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status("ERROR", "There is only a single acquisition for your parameter set.")
        return None

    wofs_data = task.get_processing_method()(single_pixel,
                                             clean_mask=clear_mask,
                                             enforce_float64=True,
                                             no_data=task.satellite.no_data_value)
    wofs_data = wofs_data.where(wofs_data != task.satellite.no_data_value).isel(latitude=0, longitude=0)
    tsm_data = tsm(single_pixel, clean_mask=clear_mask, no_data=task.satellite.no_data_value)
    tsm_data = tsm_data.where(tsm_data != task.satellite.no_data_value).isel(
        latitude=0, longitude=0).where((wofs_data.wofs.values == 1))

    datasets = [wofs_data.wofs.values.transpose(), tsm_data.tsm.values.transpose()] + [clear_mask]
    data_labels = ["Water/Non Water", "TSM (g/L)"] + ["Clear"]
    titles = ["Water/Non Water", "TSM Values"] + ["Clear Mask"]
    style = ['.', 'r-o', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path, dates=dates, datasets=datasets, data_labels=data_labels, titles=titles, style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #5
def parse_parameters_from_task(task_id=None):
    """Parse out required DC parameters from the task model.

    See the DataAccessApi docstrings for more information.
    Parses out platforms, products, etc. to be used with DataAccessApi calls.

    If this is a multisensor app, platform and product should be pluralized and used
    with the get_stacked_datasets_by_extent call rather than the normal get.

    Returns:
        parameter dict with all keyword args required to load data.

    """
    task = AppNameTask.objects.get(pk=task_id)

    parameters = {
        # TODO: If this is not a multisensor app, uncomment 'platform' and remove 'platforms'
        # 'platform': task.platform,
        'platforms': sorted(task.platform.split(",")),
        'time': (task.time_start, task.time_end),
        'longitude': (task.longitude_min, task.longitude_max),
        'latitude': (task.latitude_min, task.latitude_max),
        'measurements': task.measurements
    }

    # TODO: If this is not a multisensor app, remove 'products' and uncomment the line below.
    # parameters['product'] = Satellite.objects.get(datacube_platform=parameters['platform']).product_prefix + task.area_id
    parameters['products'] = [
        Satellite.objects.get(datacube_platform=platform).product_prefix +
        task.area_id for platform in parameters['platforms']
    ]

    task.execution_start = datetime.now()
    task.update_status("WAIT", "Parsed out parameters.")

    return parameters
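
A self-contained sketch of the platform-to-product derivation above, with the Django Satellite lookup replaced by a plain dict; the prefixes and area id are hypothetical values for illustration only:

product_prefix = {'LANDSAT_7': 'ls7_ledaps_', 'LANDSAT_8': 'ls8_ledaps_'}  # hypothetical prefixes
platforms = sorted("LANDSAT_8,LANDSAT_7".split(","))
area_id = 'kenya'  # hypothetical area id
products = [product_prefix[platform] + area_id for platform in platforms]
# -> ['ls7_ledaps_kenya', 'ls8_ledaps_kenya']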
Example #6
def perform_task_chunking(parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses chunk sizing functions provided by the task model to split the full
    parameter set into a list of smaller, per-chunk parameter sets.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic and time ranges

    """

    if parameters is None:
        return None

    task = CustomMosaicToolTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)
    dates = dc.list_combined_acquisition_dates(**parameters)
    task_chunk_sizing = task.get_chunk_size()

    product = parameters['products'][0]

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    time_chunks = create_time_chunks(
        dates, _reversed=task.get_reverse_time(), time_chunk_size=task_chunk_sizing['time'])
    logger.info("Time chunks: {}, Geo chunks: {}".format(len(time_chunks), len(geographic_chunks)))

    dc.close()
    task.update_status("WAIT", "Chunked parameter set.")
    return {'parameters': parameters, 'geographic_chunks': geographic_chunks, 'time_chunks': time_chunks}
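
create_geographic_chunks and create_time_chunks are Data Cube UI utilities. As a rough illustration of the idea, a geographic chunker could split the requested extent into fixed-size latitude strips; this standalone sketch assumes that behavior and is not the library implementation:

def sketch_geographic_chunks(longitude, latitude, geographic_chunk_size):
    """Split a (min, max) latitude extent into strips no taller than geographic_chunk_size degrees."""
    lat_min, lat_max = latitude
    chunks, start = [], lat_min
    while start < lat_max:
        end = min(start + geographic_chunk_size, lat_max)
        chunks.append({'longitude': longitude, 'latitude': (start, end)})
        start = end
    return chunks

# sketch_geographic_chunks((34.0, 35.0), (-1.0, 1.0), 0.5)
# -> four sub-extents that tile the requested area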
Example #7
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = SlipTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    acquisitions = dc.list_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR", "There are no acquistions for this parameter set.")
        return None

    if len(acquisitions) < task.baseline_length + 1:
        task.complete = True
        task.update_status("ERROR", "There are an insufficient number of acquisitions for your baseline length.")
        return None

    validation_parameters = {**parameters}
    validation_parameters.pop('time')
    validation_parameters.pop('measurements')
    validation_parameters.update({'product': 'terra_aster_gdm_' + task.area_id, 'platform': 'TERRA'})
    if len(dc.list_acquisition_dates(**validation_parameters)) < 1:
        task.complete = True
        task.update_status("ERROR", "There is no elevation data for this parameter set.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'], parameters['measurements']):
        parameters['measurements'] = ['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa']

    dc.close()
    return parameters
Example #8
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = NdviAnomalyTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    acquisitions = dc.list_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    # the actual acquisition exists; let's try the baseline:
    validation_params = {**parameters}
    # there were no acquisitions in the year 1000, hopefully
    validation_params.update({
        'time': (task.time_start.replace(year=task.time_start.year - 5),
                 task.time_start - timedelta(microseconds=1))
    })
    acquisitions = dc.list_acquisition_dates(**validation_params)

    # The list/map/int chain casts each baseline month to int; grouping won't work if they're strings.
    grouped_dates = group_datetimes_by_month(
        acquisitions,
        months=list(map(int, task.baseline_selection.split(","))))

    if not grouped_dates:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'],
                                    parameters['measurements']):
        parameters['measurements'] = [
            'blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa'
        ]

    dc.close()
    return parameters
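
The baseline window above is built by rewinding time_start five years and stopping one microsecond before the analysis period begins. A standalone sketch of that computation with a concrete example date:

from datetime import datetime, timedelta

time_start = datetime(2018, 6, 1)  # example value
baseline_window = (time_start.replace(year=time_start.year - 5),
                   time_start - timedelta(microseconds=1))
# -> (2013-06-01 00:00:00, 2018-05-31 23:59:59.999999): five years of
#    history ending just before the analysis period starts.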
Example #9
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = CustomMosaicToolTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    #validate for any number of criteria here - num acquisitions, etc.
    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR", "There are no acquistions for this parameter set.")
        return None

    if task.animated_product.animation_id != "none" and task.compositor.id == "median_pixel":
        task.complete = True
        task.update_status("ERROR", "Animations cannot be generated for median pixel operations.")
        return None

    if task.compositor.id == "median_pixel" and (task.time_end - task.time_start).days > 367:
        task.complete = True
        task.update_status("ERROR", "Median pixel operations are only supported for single year time periods.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['products'][0], parameters['measurements']):
        parameters['measurements'] = ['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa']

    dc.close()
    return parameters
Example #10
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = AppNameTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    #validate for any number of criteria here - num acquisitions, etc.
    # TODO: if this is not a multisensor app, replace list_combined_acquisition_dates with list_acquisition_dates
    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    # TODO: are there any additional validations that need to be done here?
    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if task.animated_product.animation_id != "none" and task.compositor.id == "median_pixel":
        task.complete = True
        task.update_status(
            "ERROR",
            "Animations cannot be generated for median pixel operations.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    # TODO: Check that the measurements exist - for Landsat, we're making sure that cf_mask/pixel_qa are interchangeable.
    # replace ['products'][0] with ['products'] if this is not a multisensor app.
    if not dc.validate_measurements(parameters['products'][0],
                                    parameters['measurements']):
        parameters['measurements'] = [
            'blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa'
        ]

    dc.close()
    return parameters
Example #11
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None
    """
    task = CoastalChangeTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)

    validation_params = dict(parameters)
    # verify that both the start and end year have acquisitions
    for year in parameters['time']:
        validation_params.update(
            {'time': (year, year.replace(year=year.year + 1))})
        acquisitions = dc.list_acquisition_dates(**validation_params)
        if len(acquisitions) < 1:
            task.complete = True
            task.update_status(
                "ERROR",
                "There must be at least one acquisition in both the start and ending year."
            )
            return None

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #12
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None
    """
    task = TsmTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)

    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['products'][0],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #13
def create_output_products(data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    logger.info("CREATE_OUTPUT")
    full_metadata = data[1]
    dataset = xr.open_dataset(data[0], autoclose=True)
    task = CoastalChangeTask.objects.get(pk=task_id)

    task.result_path = os.path.join(task.get_result_path(),
                                    "coastline_change.png")
    task.result_coastal_change_path = os.path.join(task.get_result_path(),
                                                   "coastal_change.png")
    task.result_mosaic_path = os.path.join(task.get_result_path(),
                                           "mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.animation_path = os.path.join(
        task.get_result_path(),
        "animation.gif") if task.animated_product.animation_id != 'none' else ""
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = task.satellite.get_measurements() + [
        'coastal_change', 'coastline_old', 'coastline_new'
    ]

    png_bands = ['red', 'green', 'blue']

    dataset.to_netcdf(task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path,
                          dataset.astype('int32'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    write_png_from_xr(task.result_path,
                      mask_mosaic_with_coastlines(dataset),
                      bands=png_bands,
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)
    write_png_from_xr(task.result_coastal_change_path,
                      mask_mosaic_with_coastal_change(dataset),
                      bands=png_bands,
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)
    write_png_from_xr(task.result_mosaic_path,
                      dataset,
                      bands=png_bands,
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)

    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I',
                                duration=1.0) as writer:
            for index in range(task.time_end - task.time_start):
                path = os.path.join(task.get_temp_path(),
                                    "animation_{}.png".format(index))
                if os.path.exists(path):
                    image = imageio.imread(path)
                    writer.append_data(image)

    logger.info("All products created.")
    task.rewrite_pathnames()
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    shutil.rmtree(task.get_temp_path())
    return True
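
The animation loop above is a recurring pattern in these apps. Reduced to a self-contained sketch using the imageio v2 API; the frame filenames here are hypothetical:

import os
import imageio

with imageio.get_writer("animation.gif", mode="I", duration=1.0) as writer:
    for index in range(10):
        path = "animation_{}.png".format(index)
        if os.path.exists(path):  # tolerate missing frames rather than failing
            writer.append_data(imageio.imread(path))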
Example #14
def create_output_products(data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    logger.info("CREATE_OUTPUT")
    full_metadata = data[1]
    dataset = xr.open_dataset(data[0], autoclose=True)
    task = NdviAnomalyTask.objects.get(pk=task_id)

    task.result_path = os.path.join(task.get_result_path(),
                                    "ndvi_difference.png")
    task.scene_ndvi_path = os.path.join(task.get_result_path(),
                                        "scene_ndvi.png")
    task.baseline_ndvi_path = os.path.join(task.get_result_path(),
                                           "baseline_ndvi.png")
    task.ndvi_percentage_change_path = os.path.join(
        task.get_result_path(), "ndvi_percentage_change.png")
    task.result_mosaic_path = os.path.join(task.get_result_path(),
                                           "result_mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = task.satellite.get_measurements() + [
        'scene_ndvi', 'baseline_ndvi', 'ndvi_difference',
        'ndvi_percentage_change'
    ]

    dataset.to_netcdf(task.data_netcdf_path)

    write_geotiff_from_xr(task.data_path,
                          dataset.astype('float64'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    write_single_band_png_from_xr(
        task.result_path,
        dataset,
        'ndvi_difference',
        color_scale=task.color_scales['ndvi_difference'],
        no_data=task.satellite.no_data_value)
    write_single_band_png_from_xr(
        task.ndvi_percentage_change_path,
        dataset,
        'ndvi_percentage_change',
        color_scale=task.color_scales['ndvi_percentage_change'],
        no_data=task.satellite.no_data_value)
    write_single_band_png_from_xr(task.scene_ndvi_path,
                                  dataset,
                                  'scene_ndvi',
                                  color_scale=task.color_scales['scene_ndvi'],
                                  no_data=task.satellite.no_data_value)
    write_single_band_png_from_xr(
        task.baseline_ndvi_path,
        dataset,
        'baseline_ndvi',
        color_scale=task.color_scales['baseline_ndvi'],
        no_data=task.satellite.no_data_value)

    write_png_from_xr(task.result_mosaic_path,
                      dataset,
                      bands=['red', 'green', 'blue'],
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)

    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    task.rewrite_pathnames()
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    shutil.rmtree(task.get_temp_path())
    return True
Example #15
def create_output_products(data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    logger.info("CREATE_OUTPUT")
    full_metadata = data[1]
    dataset = xr.open_dataset(data[0], autoclose=True)
    task = FractionalCoverTask.objects.get(pk=task_id)

    task.result_path = os.path.join(task.get_result_path(), "band_math.png")
    task.mosaic_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = (['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'cf_mask', 'pv', 'npv', 'bs']
             if 'cf_mask' in dataset else
             ['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa', 'pv', 'npv', 'bs'])

    dataset.to_netcdf(task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path, dataset.astype('int32'), bands=bands)
    write_png_from_xr(task.mosaic_path,
                      dataset,
                      bands=['red', 'green', 'blue'],
                      scale=(0, 4096))
    write_png_from_xr(task.result_path, dataset, bands=['bs', 'pv', 'npv'])

    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    shutil.rmtree(task.get_temp_path())
    return True
Example #16
def create_output_products(self, data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}
    """
    task = SpectralIndicesTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    full_metadata = data[1]
    dataset = xr.open_dataset(data[0])

    task.result_path = os.path.join(task.get_result_path(), "band_math.png")
    task.mosaic_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = task.satellite.get_measurements() + ['band_math']

    export_xarray_to_netcdf(dataset, task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path,
                          dataset.astype('int32'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    write_png_from_xr(task.mosaic_path,
                      dataset,
                      bands=['red', 'green', 'blue'],
                      scale=task.satellite.get_scale(),
                      no_data=task.satellite.no_data_value)
    write_single_band_png_from_xr(task.result_path,
                                  dataset,
                                  band='band_math',
                                  color_scale=task.color_scale_path.get(
                                      task.query_type.result_id),
                                  no_data=task.satellite.no_data_value)

    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    return True
Example #17
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = SpectralIndicesTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_dataset_by_extent(**parameters).isel(latitude=0,
                                                               longitude=0)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel)
    single_pixel = single_pixel.where(
        single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status(
            "ERROR",
            "There is only a single acquisition for your parameter set.")
        return None

    # The spectral index formulas are assumed to be defined at module level in
    # the real app; they are inlined here so this snippet is self-contained.
    spectral_indices_map = {
        'ndvi': lambda ds: (ds.nir - ds.red) / (ds.nir + ds.red),
        'evi': lambda ds: 2.5 * (ds.nir - ds.red) / (ds.nir + 6 * ds.red - 7.5 * ds.blue + 1),
        'savi': lambda ds: (ds.nir - ds.red) / (ds.nir + ds.red + 0.5) * (1.5),
        'nbr': lambda ds: (ds.nir - ds.swir2) / (ds.nir + ds.swir2),
        'nbr2': lambda ds: (ds.swir1 - ds.swir2) / (ds.swir1 + ds.swir2),
        'ndwi': lambda ds: (ds.nir - ds.swir1) / (ds.nir + ds.swir1),
        'ndbi': lambda ds: (ds.swir1 - ds.nir) / (ds.nir + ds.swir1),
    }

    for spectral_index in spectral_indices_map:
        single_pixel[spectral_index] = spectral_indices_map[spectral_index](
            single_pixel)

    exclusion_list = task.satellite.get_measurements()
    plot_measurements = [
        band for band in single_pixel.data_vars if band not in exclusion_list
    ]

    datasets = [
        single_pixel[band].values.transpose() for band in plot_measurements
    ] + [clear_mask]
    data_labels = [
        stringcase.uppercase("{}".format(band)) for band in plot_measurements
    ] + ["Clear"]
    titles = [
        stringcase.uppercase("{}".format(band)) for band in plot_measurements
    ] + ["Clear Mask"]
    style = ['ro', 'go', 'bo', 'co', 'mo', 'yo', 'ko', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path,
                   dates=dates,
                   datasets=datasets,
                   data_labels=data_labels,
                   titles=titles,
                   style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #18
def create_output_products(data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    logger.info("CREATE_OUTPUT")
    full_metadata = data[1]
    dataset = xr.open_dataset(data[0], autoclose=True)
    task = CustomMosaicToolTask.objects.get(pk=task_id)

    task.result_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.result_filled_path = os.path.join(task.get_result_path(), "filled_png_mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(), "data_netcdf.nc")
    task.animation_path = os.path.join(task.get_result_path(),
                                       "animation.gif") if task.animated_product.animation_id != 'none' else ""
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = task.satellite.get_measurements()
    png_bands = [task.query_type.red, task.query_type.green, task.query_type.blue]

    dataset.to_netcdf(task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path, dataset.astype('int32'), bands=bands, no_data=task.satellite.no_data_value)
    write_png_from_xr(
        task.result_path,
        dataset,
        bands=png_bands,
        png_filled_path=task.result_filled_path,
        fill_color=task.query_type.fill,
        scale=task.satellite.get_scale(),
        low_res=True,
        no_data=task.satellite.no_data_value)

    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I', duration=1.0) as writer:
            valid_range = range(len(full_metadata))
            if task.animated_product.animation_id == "scene" and task.get_reverse_time():
                valid_range = reversed(valid_range)
            for index in valid_range:
                path = os.path.join(task.get_temp_path(), "animation_{}.png".format(index))
                if os.path.exists(path):
                    image = imageio.imread(path)
                    writer.append_data(image)

    dates = list(map(lambda x: datetime.strptime(x, "%m/%d/%Y"), task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(
            task.plot_path,
            dates=dates,
            datasets=task._get_field_as_list('clean_pixel_percentages_per_acquisition'),
            data_labels="Clean Pixel Percentage (%)",
            titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status("OK", "All products have been generated. Your result will be loaded on the map.")
    shutil.rmtree(task.get_temp_path())
    return True
Example #19
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = FractionalCoverTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_stacked_datasets_by_extent(**parameters)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel.isel(
        latitude=0, longitude=0))
    single_pixel = single_pixel.where(
        single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status(
            "ERROR",
            "There is only a single acquisition for your parameter set.")
        return None

    def _apply_band_math(ds, idx):
        # mask out water manually. Necessary for frac. cover.
        wofs = wofs_classify(ds, clean_mask=clear_mask[idx], mosaic=True)
        clear_mask[idx] = False if wofs.wofs.values[0] == 1 else clear_mask[idx]
        fractional_cover = frac_coverage_classify(
            ds,
            clean_mask=clear_mask[idx],
            no_data=task.satellite.no_data_value,
            platform=task.satellite.platform,
            collection=task.satellite.collection)
        return fractional_cover

    fractional_cover = xr.concat([
        _apply_band_math(single_pixel.isel(time=data_point, drop=True), data_point)
        for data_point in range(len(dates))
    ], dim='time')

    fractional_cover = fractional_cover.where(
        fractional_cover != task.satellite.no_data_value).isel(latitude=0,
                                                               longitude=0)

    exclusion_list = []
    plot_measurements = [
        band for band in fractional_cover.data_vars
        if band not in exclusion_list
    ]

    datasets = [
        fractional_cover[band].values.transpose() for band in plot_measurements
    ] + [clear_mask]
    data_labels = [
        stringcase.titlecase("%{}".format(band)) for band in plot_measurements
    ] + ["Clear"]
    titles = [
        'Bare Soil Percentage', 'Photosynthetic Vegetation Percentage',
        'Non-Photosynthetic Vegetation Percentage', 'Clear Mask'
    ]
    style = ['ro', 'go', 'bo', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path,
                   dates=dates,
                   datasets=datasets,
                   data_labels=data_labels,
                   titles=titles,
                   style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #20
def start_chunk_processing(self, chunk_details, task_id=None):
    """Create a fully asyncrhonous processing pipeline from paramters and a list of chunks.

    The most efficient way to do this is to create a group of time chunks for each geographic chunk,
    recombine over the time index, then combine geographic last.
    If we create an animation, this needs to be reversed - e.g. group of geographic for each time,
    recombine over geographic, then recombine time last.

    The full processing pipeline is completed, then the create_output_products task is triggered, completing the task.
    """
    if chunk_details is None:
        return None

    parameters = chunk_details.get('parameters')
    geographic_chunks = chunk_details.get('geographic_chunks')

    task = SpectralAnomalyTask.objects.get(pk=task_id)

    api = DataAccessApi(config=task.config_path)

    # Get an estimate of the amount of work to be done: the number of scenes
    # to process, also considering intermediate chunks to be combined.
    # Determine the number of scenes for the baseline and analysis extents.
    num_scenes = {}
    params_temp = parameters.copy()
    for composite_name in ['baseline', 'analysis']:
        num_scenes[composite_name] = 0
        for geographic_chunk in geographic_chunks:
            params_temp.update(geographic_chunk)
            params_temp['measurements'] = []
            # Use the corresponding time range for the baseline and analysis data.
            params_temp['time'] = \
                params_temp['baseline_time' if composite_name == 'baseline' else 'analysis_time']
            params_temp_clean = params_temp.copy()
            del params_temp_clean['baseline_time'], params_temp_clean['analysis_time'], \
                params_temp_clean['composite_range'], params_temp_clean['change_range']
            data = api.dc.load(**params_temp_clean)
            if 'time' in data.coords:
                num_scenes[composite_name] += len(data.time)
    # The number of scenes per geographic chunk for baseline and analysis extents.
    num_scn_per_chk_geo = {
        k: round(v / len(geographic_chunks))
        for k, v in num_scenes.items()
    }
    # Scene processing progress is tracked in processing_task().
    task.total_scenes = sum(num_scenes.values())
    task.scenes_processed = 0
    task.save(update_fields=['total_scenes', 'scenes_processed'])

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Starting processing.")

    processing_pipeline = (group([
            processing_task.s(
                task_id=task_id,
                geo_chunk_id=geo_index,
                geographic_chunk=geographic_chunk,
                num_scn_per_chk=num_scn_per_chk_geo,
                **parameters) for geo_index, geographic_chunk in enumerate(geographic_chunks)
    ]) | recombine_geographic_chunks.s(task_id=task_id) | create_output_products.s(task_id=task_id) \
       | task_clean_up.si(task_id=task_id, task_model='SpectralAnomalyTask')).apply_async()

    return True
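
Piping a Celery group into a signature, as the pipeline expression above does, forms a chord: recombine_geographic_chunks receives the list of per-chunk results. A toy, self-contained illustration of that shape; the task names and in-memory broker are hypothetical:

from celery import Celery, group

app = Celery('sketch', broker='memory://', backend='cache+memory://')

@app.task
def process_chunk(chunk):
    return chunk * 2

@app.task
def recombine(results):
    # receives the gathered group results, e.g. [0, 2, 4, 6]
    return sum(results)

pipeline = group(process_chunk.s(chunk) for chunk in range(4)) | recombine.s()
# pipeline.apply_async() would execute it against a real broker and worker.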
Example #21
def create_output_products(self, data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    if data is None: return None

    task = SpectralAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    spectral_index = task.query_type.result_id

    full_metadata = data[3]
    # This is the difference (or "change") composite.
    diff_composite = xr.open_dataset(data[0])
    # This indicates where either the baseline or analysis composite
    # was outside the corresponding user-specified range.
    orig_composite_out_of_range = \
        xr.open_dataset(data[1])[spectral_index].astype(bool).values
    # This indicates where either the baseline or analysis composite
    # was the no_data value.
    composite_no_data = \
        xr.open_dataset(data[2])[spectral_index].astype(bool).values

    # Obtain a NumPy array of the data to create a plot later.
    if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
        diff_comp_np_arr = diff_composite[spectral_index].values
    else:  # Fractional Cover
        diff_comp_np_arr = diff_composite['pv'].values
    diff_comp_np_arr[composite_no_data] = np.nan

    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.result_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.final_metadata_from_dataset(diff_composite)
    task.metadata_from_dict(full_metadata)

    # 1. Prepare to save the spectral index net change as a GeoTIFF and NetCDF.
    if spectral_index in ['ndvi', 'ndbi', 'ndwi', 'evi']:
        bands = [spectral_index]
    else:  # Fractional Coverage
        bands = ['bs', 'pv', 'npv']
    # 2. Prepare to create a PNG of the spectral index change composite.
    # 2.1. Find the min and max possible difference for the selected spectral index.
    spec_ind_min, spec_ind_max = spectral_indices_range_map[spectral_index]
    diff_min_possible, diff_max_possible = spec_ind_min - spec_ind_max, spec_ind_max - spec_ind_min
    # 2.2. Scale the difference composite to the range [0, 1] for plotting.
    image_data = np.interp(diff_comp_np_arr,
                           (diff_min_possible, diff_max_possible), (0, 1))
    # 2.3. Color by region.
    # 2.3.1. First, color by change.
    # If the user specified a change value range, the product is binary -
    # denoting which pixels fall within the net change threshold.
    cng_min, cng_max = task.change_threshold_min, task.change_threshold_max
    if cng_min is not None and cng_max is not None:
        image_data = np.empty((*image_data.shape, 4), dtype=image_data.dtype)
        image_data[:, :] = mpl.colors.to_rgba('red')
    else:  # otherwise, use a red-green gradient.
        cmap = plt.get_cmap('RdYlGn')
        image_data = cmap(image_data)
    # 2.3.2. Second, color regions in which the change was outside
    #        the optional user-specified change value range.
    change_out_of_range_color = mpl.colors.to_rgba('black')
    if cng_min is not None and cng_max is not None:
        diff_composite_out_of_range = (diff_comp_np_arr <
                                       cng_min) | (cng_max < diff_comp_np_arr)
        image_data[diff_composite_out_of_range] = change_out_of_range_color
    # 2.3.3. Third, color regions in which either the baseline or analysis
    #        composite was outside the user-specified composite value range.
    composite_out_of_range_color = mpl.colors.to_rgba('white')
    image_data[orig_composite_out_of_range] = composite_out_of_range_color
    #  2.3.4. Fourth, color regions in which either the baseline or analysis
    #         composite was the no_data value as transparent.
    composite_no_data_color = np.array([0., 0., 0., 0.])
    image_data[composite_no_data] = composite_no_data_color

    # Create output products (NetCDF, GeoTIFF, PNG).
    export_xarray_to_netcdf(diff_composite, task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path,
                          diff_composite.astype('float32'),
                          bands=bands,
                          no_data=task.satellite.no_data_value)
    plt.imsave(task.result_path, image_data)

    # Plot metadata.
    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    return True
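
The np.interp call above linearly rescales the difference composite into [0, 1] for the colormap. For NDVI, whose values span [-1, 1], the possible differences span [-2, 2]; this standalone check shows the mapping:

import numpy as np

spec_ind_min, spec_ind_max = -1.0, 1.0  # NDVI value range
diff_min, diff_max = spec_ind_min - spec_ind_max, spec_ind_max - spec_ind_min
print(np.interp([diff_min, 0.0, diff_max], (diff_min, diff_max), (0, 1)))
# -> [0.  0.5 1. ]: zero net change lands at the colormap midpoint.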
Example #22
def create_output_products(data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    logger.info("CREATE_OUTPUT")
    full_metadata = data[1]
    dataset = xr.open_dataset(data[0], autoclose=True)
    task = AppNameTask.objects.get(pk=task_id)

    # TODO: Add any paths that you've added in your models.py Result model and remove the ones that aren't there.
    task.result_path = os.path.join(task.get_result_path(), "png_mosaic.png")
    task.result_filled_path = os.path.join(task.get_result_path(),
                                           "filled_png_mosaic.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(),
                                         "data_netcdf.nc")
    task.animation_path = os.path.join(
        task.get_result_path(),
        "animation.gif") if task.animated_product.animation_id != 'none' else ""
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    # TODO: Set the bands that should be written to the final products
    bands = (['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'cf_mask']
             if 'cf_mask' in dataset else
             ['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa'])

    # TODO: If you're creating pngs, specify the RGB bands
    png_bands = [
        task.query_type.red, task.query_type.green, task.query_type.blue
    ]

    dataset.to_netcdf(task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path, dataset.astype('int32'), bands=bands)
    write_png_from_xr(task.result_path,
                      dataset,
                      bands=png_bands,
                      png_filled_path=task.result_filled_path,
                      fill_color=task.query_type.fill,
                      scale=(0, 4096))

    # TODO: if there is no animation, remove this. Otherwise, open each time iteration slice and write to disk.
    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I',
                                duration=1.0) as writer:
            valid_range = range(len(full_metadata))
            if task.animated_product.animation_id == "scene" and task.get_reverse_time():
                valid_range = reversed(valid_range)
            for index in valid_range:
                path = os.path.join(task.get_temp_path(),
                                    "animation_{}.png".format(index))
                if os.path.exists(path):
                    image = imageio.imread(path)
                    writer.append_data(image)

    # TODO: if you're capturing more tabular metadata, plot it here by converting these to lists.
    # an example of this is the current water detection app.
    dates = list(
        map(lambda x: datetime.strptime(x, "%m/%d/%Y"),
            task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(task.plot_path,
                       dates=dates,
                       datasets=task._get_field_as_list(
                           'clean_pixel_percentages_per_acquisition'),
                       data_labels="Clean Pixel Percentage (%)",
                       titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status(
        "OK",
        "All products have been generated. Your result will be loaded on the map."
    )
    shutil.rmtree(task.get_temp_path())
    return True
Example #23
def create_output_products(data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of processing_task function - path, metadata, and {chunk ids}

    """
    logger.info("CREATE_OUTPUT")
    full_metadata = data[1]
    dataset = xr.open_dataset(data[0], autoclose=True).astype('float64')
    task = WaterDetectionTask.objects.get(pk=task_id)

    task.result_path = os.path.join(task.get_result_path(), "water_percentage.png")
    task.water_observations_path = os.path.join(task.get_result_path(), "water_observations.png")
    task.clear_observations_path = os.path.join(task.get_result_path(), "clear_observations.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(), "data_netcdf.nc")
    task.animation_path = os.path.join(task.get_result_path(),
                                       "animation.gif") if task.animated_product.animation_id != 'none' else ""
    task.final_metadata_from_dataset(dataset)
    task.metadata_from_dict(full_metadata)

    bands = ['normalized_data', 'total_data', 'total_clean']
    band_paths = [task.result_path, task.water_observations_path, task.clear_observations_path]

    dataset.to_netcdf(task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path, dataset, bands=bands, no_data=task.satellite.no_data_value)

    for band, band_path in zip(bands, band_paths):
        write_single_band_png_from_xr(
            band_path,
            dataset,
            band,
            color_scale=task.color_scales[band],
            fill_color=task.query_type.fill,
            interpolate=False,
            no_data=task.satellite.no_data_value)

    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I', duration=1.0) as writer:
            valid_range = range(len(full_metadata))
            for index in valid_range:
                path = os.path.join(task.get_temp_path(), "animation_{}.png".format(index))
                if os.path.exists(path):
                    image = imageio.imread(path)
                    writer.append_data(image)

    dates = list(map(lambda x: datetime.strptime(x, "%m/%d/%Y"), task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(
            task.plot_path,
            dates=dates,
            datasets=[
                task._get_field_as_list('clean_pixel_percentages_per_acquisition'),
                [
                    # Water pixels as a share of clean pixels per acquisition,
                    # guarding against division by zero and scaled to match the
                    # "(%)" axis label.
                    100 * int(x) / max(int(y), 1)
                    for x, y in zip(
                        task._get_field_as_list('water_pixels_per_acquisition'),
                        task._get_field_as_list('clean_pixels_per_acquisition'))
                ]
            ],
            data_labels=["Clean Pixel Percentage (%)", "Water Pixel Percentage (%)"],
            titles=["Clean Pixel Percentage Per Acquisition", "Water Pixels Percentage Per Acquisition"])

    logger.info("All products created.")
    # task.update_bounds_from_dataset(dataset)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status("OK", "All products have been generated. Your result will be loaded on the map.")
    shutil.rmtree(task.get_temp_path())
    return True
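The animation block above follows a common imageio pattern: open a writer in 'I' (multi-image) mode and append one frame per file. A self-contained sketch of just that pattern (the directory and frame naming scheme are placeholder assumptions, not from the app):

import os
import imageio

# Minimal GIF-assembly sketch; frame_dir and the "animation_{}.png" naming
# scheme are illustrative assumptions.
def build_gif(frame_dir, out_path, n_frames, seconds_per_frame=1.0):
    with imageio.get_writer(out_path, mode='I', duration=seconds_per_frame) as writer:
        for index in range(n_frames):
            path = os.path.join(frame_dir, "animation_{}.png".format(index))
            if os.path.exists(path):  # skip frames that were never rendered
                writer.append_data(imageio.imread(path))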
Example #24
0
def create_output_products(self, data, task_id=None):
    """Create the final output products for this algorithm.

    Open the final dataset and metadata and generate all remaining metadata.
    Convert and write the dataset to various formats and register all values in the task model.
    Update status and exit.

    Args:
        data: tuple in the format of the processing_task function - path, metadata, and {chunk ids}
    """
    task = TsmTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    full_metadata = data[1]
    dataset = xr.open_dataset(data[0]).astype('float64')
    # Variability: spread between the per-pixel maximum and the composited value.
    dataset['variability'] = dataset['max'] - dataset['normalized_data']
    # Normalize water observations into a per-pixel water frequency.
    dataset['wofs'] = dataset.wofs / dataset.wofs_total_clean
    # Zero out any NaNs produced by the division (e.g. zero clean observations).
    nan_to_num(dataset, 0)
    dataset_masked = mask_water_quality(dataset, dataset.wofs)

    task.result_path = os.path.join(task.get_result_path(), "tsm.png")
    task.clear_observations_path = os.path.join(task.get_result_path(), "clear_observations.png")
    task.water_percentage_path = os.path.join(task.get_result_path(), "water_percentage.png")
    task.data_path = os.path.join(task.get_result_path(), "data_tif.tif")
    task.data_netcdf_path = os.path.join(task.get_result_path(), "data_netcdf.nc")
    task.animation_path = os.path.join(task.get_result_path(),
                                       "animation.gif") if task.animated_product.animation_id != 'none' else ""
    task.final_metadata_from_dataset(dataset_masked)
    task.metadata_from_dict(full_metadata)

    bands = [task.query_type.data_variable, 'total_clean', 'wofs']
    band_paths = [task.result_path, task.clear_observations_path, task.water_percentage_path]

    dataset_masked.to_netcdf(task.data_netcdf_path)
    write_geotiff_from_xr(task.data_path, dataset_masked, bands=bands, no_data=task.satellite.no_data_value)

    for band, band_path in zip(bands, band_paths):
        write_single_band_png_from_xr(
            band_path,
            dataset_masked,
            band,
            color_scale=task.color_scales[band],
            fill_color='black',
            interpolate=False,
            no_data=task.satellite.no_data_value)

    if task.animated_product.animation_id != "none":
        with imageio.get_writer(task.animation_path, mode='I', duration=1.0) as writer:
            valid_range = range(len(full_metadata))
            for index in valid_range:
                path = os.path.join(task.get_temp_path(), "animation_final_{}.nc".format(index))
                if os.path.exists(path):
                    png_path = os.path.join(task.get_temp_path(), "animation_{}.png".format(index))
                    # "scene" animations use the raw frames; all other animated
                    # products are masked by the water frequency first.
                    if task.animated_product.animation_id != "scene":
                        animated_data = mask_water_quality(
                            xr.open_dataset(path).astype('float64'), dataset.wofs)
                    else:
                        animated_data = xr.open_dataset(path)
                    write_single_band_png_from_xr(
                        png_path,
                        animated_data,
                        task.animated_product.data_variable,
                        color_scale=task.color_scales[task.animated_product.data_variable],
                        fill_color='black',
                        interpolate=False,
                        no_data=task.satellite.no_data_value)
                    image = imageio.imread(png_path)
                    writer.append_data(image)

    dates = list(map(lambda x: datetime.strptime(x, "%m/%d/%Y"), task._get_field_as_list('acquisition_list')))
    if len(dates) > 1:
        task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
        create_2d_plot(
            task.plot_path,
            dates=dates,
            datasets=task._get_field_as_list('clean_pixel_percentages_per_acquisition'),
            data_labels="Clean Pixel Percentage (%)",
            titles="Clean Pixel Percentage Per Acquisition")

    logger.info("All products created.")
    task.update_bounds_from_dataset(dataset_masked)
    task.complete = True
    task.execution_end = datetime.now()
    task.update_status("OK", "All products have been generated. Your result will be loaded on the map.")
    return True
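mask_water_quality is another shared helper whose source is not included here. A generic xarray analogue of masking a dataset by a water-frequency array (the function name and threshold are assumptions) could be:

import xarray as xr

# Hypothetical analogue of mask_water_quality: blank out water-quality values
# wherever water was never observed, leaving NaN for the PNG fill color.
def mask_by_water_frequency(dataset, wofs, threshold=0.0):
    return dataset.where(wofs > threshold)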
Example #25
0
def start_chunk_processing(self, chunk_details, task_id=None):
    """Create a fully asyncrhonous processing pipeline from paramters and a list of chunks.

    The most efficient way to do this is to create a group of time chunks for each geographic chunk,
    recombine over the time index, then combine geographic last.
    If we create an animation, this needs to be reversed - e.g. group of geographic for each time,
    recombine over geographic, then recombine time last.

    The full processing pipeline is completed, then the create_output_products task is triggered, completing the task.
    """
    if chunk_details is None:
        return None

    parameters = chunk_details.get('parameters')
    geographic_chunks = chunk_details.get('geographic_chunks')
    time_chunks = chunk_details.get('time_chunks')

    task = FractionalCoverTask.objects.get(pk=task_id)

    # Get an estimate of the amount of work to be done: the number of scenes
    # to process, also considering intermediate chunks to be combined.
    num_scenes = len(geographic_chunks) * sum(
        [len(time_chunk) for time_chunk in time_chunks])
    # recombine_time_chunks() and process_band_math() scenes:
    # num_scn_per_chk * len(time_chunks) * len(geographic_chunks)
    num_scn_per_chk = round(num_scenes /
                            (len(time_chunks) * len(geographic_chunks)))
    # recombine_geographic_chunks() and create_output_products() scenes:
    # num_scn_per_chk_geo * len(geographic_chunks)
    num_scn_per_chk_geo = round(num_scenes / len(geographic_chunks))
    # Scene processing progress is tracked in: processing_task(), recombine_time_chunks(),
    # and process_band_math(). Scenes in process_band_math() are counted twice
    # for the sake of tracking progress because it takes so long to run. So 1 + 1 + 2 = 4.
    task.total_scenes = 4 * num_scenes
    task.scenes_processed = 0
    task.save(update_fields=['total_scenes', 'scenes_processed'])
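    # Worked example with illustrative numbers: 2 geographic chunks and two
    # time chunks of 10 acquisitions each give num_scenes = 2 * 20 = 40,
    # num_scn_per_chk = round(40 / (2 * 2)) = 10,
    # num_scn_per_chk_geo = round(40 / 2) = 20, and total_scenes = 4 * 40 = 160.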

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Starting processing.")

    logger.info("START_CHUNK_PROCESSING")

    processing_pipeline = (
        group([
            group([
                processing_task.s(task_id=task_id,
                                  geo_chunk_id=geo_index,
                                  time_chunk_id=time_index,
                                  geographic_chunk=geographic_chunk,
                                  time_chunk=time_chunk,
                                  **parameters)
                for time_index, time_chunk in enumerate(time_chunks)
            ]) | recombine_time_chunks.s(task_id=task_id,
                                         num_scn_per_chk=num_scn_per_chk)
            | process_band_math.s(task_id=task_id,
                                  num_scn_per_chk=2 * num_scn_per_chk_geo)
            for geo_index, geographic_chunk in enumerate(geographic_chunks)
        ]) | recombine_geographic_chunks.s(task_id=task_id)
        | create_output_products.s(task_id=task_id)
        | task_clean_up.si(task_id=task_id,
                           task_model='FractionalCoverTask')).apply_async()

    return True
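The docstring describes a reversed ordering for animation runs, but this example only builds the non-animated pipeline. A minimal sketch of the inverted canvas, reusing the task names above (the exact keyword arguments passed to each task are assumptions, not from this app):

from celery import group

# Hypothetical inverted canvas per the docstring: for each time chunk, process
# every geographic chunk, recombine those geographically, then recombine the
# per-time results across time at the very end.
animation_pipeline = (
    group([
        group([
            processing_task.s(task_id=task_id,
                              geo_chunk_id=geo_index,
                              time_chunk_id=time_index,
                              geographic_chunk=geographic_chunk,
                              time_chunk=time_chunk,
                              **parameters)
            for geo_index, geographic_chunk in enumerate(geographic_chunks)
        ]) | recombine_geographic_chunks.s(task_id=task_id)
        for time_index, time_chunk in enumerate(time_chunks)
    ]) | recombine_time_chunks.s(task_id=task_id,
                                 num_scn_per_chk=num_scn_per_chk)
    | create_output_products.s(task_id=task_id)).apply_async()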