Example #1
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = CustomMosaicToolTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_stacked_datasets_by_extent(**parameters).isel(latitude=0, longitude=0)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel)
    single_pixel = single_pixel.where(single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status("ERROR", "There is only a single acquisition for your parameter set.")
        return None

    exclusion_list = ['satellite', 'pixel_qa']
    plot_measurements = [band for band in single_pixel.data_vars if band not in exclusion_list]

    datasets = [single_pixel[band].values.transpose() for band in plot_measurements] + [clear_mask]
    data_labels = [stringcase.titlecase("{} Units".format(band)) for band in plot_measurements] + ["Clear"]
    titles = [stringcase.titlecase("{} Band".format(band)) for band in plot_measurements] + ["Clear Mask"]
    style = ['r-o', 'g-o', 'b-o', 'c-o', 'm-o', 'y-o', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path, dates=dates, datasets=datasets, data_labels=data_labels, titles=titles, style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #2
def get_acquisition_list(task, area_id, satellite, date):
    dc = DataAccessApi(config=task.config_path)
    # Lists all acquisition dates for use in single timeslice queries.
    # Note: the product is just the area id here (formerly satellite.product_prefix + area_id).
    product = area_id
    acquisitions = dc.list_acquisition_dates(product, satellite.datacube_platform, time=(datetime(1900, 1, 1), date))
    return acquisitions
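
A hypothetical call site for the helper above, reusing task fields that appear elsewhere on this page (the choice of task model here is an assumption):

# Hypothetical usage: list every acquisition up to the task's end date.
task = CustomMosaicToolTask.objects.get(pk=task_id)
acquisitions = get_acquisition_list(task, task.area_id, task.satellite, task.time_end)
print("{} acquisitions available".format(len(acquisitions)))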
Example #3
def perform_task_chunking(self, parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses functions provided by the task model to split the full parameter set
    into a group of smaller parameter sets.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic ranges
    """
    if parameters is None:
        return None

    task = SpectralAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)
    task_chunk_sizing = task.get_chunk_size()

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    # This app does not currently support time chunking.

    dc.close()
    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Chunked parameter set.")

    return {'parameters': parameters, 'geographic_chunks': geographic_chunks}
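
For orientation, the returned value has roughly the following shape; the bounds are illustrative, and each entry in geographic_chunks later becomes the geographic_chunk argument of a processing task:

# Illustrative shape of the return value (bounds made up):
# {
#     'parameters': {...the original load kwargs...},
#     'geographic_chunks': [
#         {'longitude': (34.0, 34.5), 'latitude': (-1.0, -0.5)},
#         {'longitude': (34.5, 35.0), 'latitude': (-1.0, -0.5)},
#     ],
# }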
Example #4
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = UrbanizationTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_dataset_by_extent(**parameters).isel(latitude=0, longitude=0)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel)
    single_pixel = single_pixel.where(single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status("ERROR", "There is only a single acquisition for your parameter set.")
        return None

    datasets = [data_array.values.transpose() for data_array in _apply_band_math(single_pixel)] + [clear_mask]
    data_labels = ["NDVI", "NDWI", "NDBI"] + ["Clear"]
    titles = ["Dense Vegetatin (NDVI)", "Water Concentration (NDWI)", "Urbanization (NDBI)", 'Clear Mask']
    style = ['go', 'bo', 'ro', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path, dates=dates, datasets=datasets, data_labels=data_labels, titles=titles, style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #5
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = TsmTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_stacked_datasets_by_extent(**parameters)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel)
    single_pixel = single_pixel.where(single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status("ERROR", "There is only a single acquisition for your parameter set.")
        return None

    # Ensure data variables have the range of Landsat 7 Collection 1 Level 2
    # since the color scales are tailored for that dataset.
    platform = task.satellite.platform
    collection = task.satellite.collection
    level = task.satellite.level
    if (platform, collection) != ('LANDSAT_7', 'c1'):
        single_pixel = \
            convert_range(single_pixel, from_platform=platform, 
                        from_collection=collection, from_level=level,
                        to_platform='LANDSAT_7', to_collection='c1', to_level='l2')

    wofs_data = task.get_processing_method()(single_pixel,
                                             clean_mask=clear_mask,
                                             no_data=task.satellite.no_data_value)
    wofs_data = \
        wofs_data.where(wofs_data != task.satellite.no_data_value)
    wofs_data = wofs_data.squeeze()
    tsm_data = \
        tsm(single_pixel, clean_mask=clear_mask, no_data=task.satellite.no_data_value)
    tsm_data = \
        tsm_data.where(tsm_data != task.satellite.no_data_value)\
        .squeeze().where(wofs_data.wofs.values == 1)

    # Remove NaNs to avoid errors and yield a nicer plot.
    water_non_nan_times = ~np.isnan(wofs_data.wofs.values)
    wofs_data = wofs_data.isel(time=water_non_nan_times)
    tsm_non_nan_times = ~np.isnan(tsm_data.tsm.values)
    tsm_data = tsm_data.isel(time=tsm_non_nan_times)

    datasets = [wofs_data.wofs.values.transpose().squeeze(), 
                tsm_data.tsm.values.transpose().squeeze()] + \
                [clear_mask.squeeze()]
    dates = [dates[water_non_nan_times], dates[tsm_non_nan_times]] + [dates]
    data_labels = ["Water/Non Water", "TSM (g/L)"] + ["Clear"]
    titles = ["Water/Non Water", "TSM Values"] + ["Clear Mask"]
    style = ['.', 'ro', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path, dates=dates, datasets=datasets, data_labels=data_labels, titles=titles, style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #6
def perform_task_chunking(self, parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses functions provided by the task model to split the full parameter set
    into a group of smaller parameter sets.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic and time ranges
    """
    if parameters is None:
        return None

    task = TsmTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)
    dates = dc.list_combined_acquisition_dates(**parameters)
    task_chunk_sizing = task.get_chunk_size()

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    time_chunks = create_time_chunks(
        dates, _reversed=task.get_reverse_time(), time_chunk_size=task_chunk_sizing['time'])
    logger.info("Time chunks: {}, Geo chunks: {}".format(len(time_chunks), len(geographic_chunks)))

    dc.close()
    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Chunked parameter set.")
    return {'parameters': parameters, 'geographic_chunks': geographic_chunks, 'time_chunks': time_chunks}
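
Each element of time_chunks is consumed by processing_task (see Examples #26-28), which indexes time_chunk[0] and time_chunk[-1]; a chunk is therefore a list of acquisition datetimes, roughly:

# Illustrative shape of time_chunks (dates made up):
# [
#     [datetime(2017, 1, 3), datetime(2017, 1, 19), ...],  # time chunk 0
#     [datetime(2017, 5, 2), datetime(2017, 5, 18), ...],  # time chunk 1
# ]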
Example #7
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = WaterDetectionTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_stacked_datasets_by_extent(**parameters)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel.isel(latitude=0, longitude=0))
    single_pixel = single_pixel.where(single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status("ERROR", "There is only a single acquisition for your parameter set.")
        return None

    wofs_data = task.get_processing_method()(single_pixel,
                                             clean_mask=clear_mask,
                                             enforce_float64=True,
                                             no_data=task.satellite.no_data_value)
    wofs_data = wofs_data.where(wofs_data != task.satellite.no_data_value).isel(latitude=0, longitude=0)

    datasets = [wofs_data.wofs.values.transpose()] + [clear_mask]
    data_labels = ["Water/Non Water"] + ["Clear"]
    titles = ["Water/Non Water"] + ["Clear Mask"]
    style = ['.', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path, dates=dates, datasets=datasets, data_labels=data_labels, titles=titles, style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #8
def perform_task_chunking(self, parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses functions provided by the task model to split the full parameter set
    into a group of smaller parameter sets.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic and time ranges
    """
    if parameters is None:
        return None

    task = NdviAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)
    task_chunk_sizing = task.get_chunk_size()

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    grouped_dates_params = {**parameters}
    grouped_dates_params.update({
        'time': (datetime(1000, 1,
                          1), task.time_start - timedelta(microseconds=1))
    })
    acquisitions = dc.list_acquisition_dates(**grouped_dates_params)
    grouped_dates = group_datetimes_by_month(
        acquisitions,
        months=list(map(int, task.baseline_selection.split(","))))
    # Create a single monolithic list of all acquisition dates - there should be only one time chunk.
    time_chunks = []
    for date_group in grouped_dates:
        time_chunks.extend(grouped_dates[date_group])
    # Time chunks cast to a single list, essentially.
    time_chunks = [time_chunks]

    logger.info("Time chunks: {}, Geo chunks: {}".format(
        len(time_chunks), len(geographic_chunks)))

    dc.close()
    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Chunked parameter set.")
    return {
        'parameters': parameters,
        'geographic_chunks': geographic_chunks,
        'time_chunks': time_chunks
    }
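
As used here and in Example #15, group_datetimes_by_month returns a mapping from month to the acquisitions that fall in it; the flattening above then collapses everything into that single time chunk, roughly:

# Illustrative (months and dates made up):
# grouped_dates = {1: [datetime(2014, 1, 5), ...], 2: [datetime(2014, 2, 6), ...]}
# time_chunks   = [[datetime(2014, 1, 5), ..., datetime(2014, 2, 6), ...]]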
Example #9
def perform_task_chunking(parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses functions provided by the task model to split the full parameter set
    into a group of smaller parameter sets.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic and time ranges

    """

    if parameters is None:
        return None

    task = CoastalChangeTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    dates = dc.list_acquisition_dates(**parameters)
    task_chunk_sizing = task.get_chunk_size()

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    grouped_dates = group_datetimes_by_year(dates)
    # We need to pair the first year with each subsequent year.
    time_chunks = None
    if task.animated_product.animation_id == 'none':
        # first and last only
        time_chunks = [[
            grouped_dates[task.time_start], grouped_dates[task.time_end]
        ]]
    else:
        initial_year = grouped_dates.pop(task.time_start)
        time_chunks = [[initial_year, grouped_dates[year]]
                       for year in grouped_dates]
    logger.info("Time chunks: {}, Geo chunks: {}".format(
        len(time_chunks), len(geographic_chunks)))

    dc.close()
    task.update_status("WAIT", "Chunked parameter set.")

    return {
        'parameters': parameters,
        'geographic_chunks': geographic_chunks,
        'time_chunks': time_chunks
    }
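
With an animated product selected, the pairing above compares the start year against every later year; sketched with illustrative years:

# grouped_dates maps each year to that year's acquisition dates.
# With years 2000-2003 (illustrative):
# time_chunks = [[dates_2000, dates_2001],
#                [dates_2000, dates_2002],
#                [dates_2000, dates_2003]]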
Example #10
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = FractionalCoverTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_stacked_datasets_by_extent(**parameters)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel.isel(latitude=0, longitude=0))
    single_pixel = single_pixel.where(single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status("ERROR", "There is only a single acquisition for your parameter set.")
        return None

    def _apply_band_math(ds, idx):
        # Mask out water manually. Necessary for fractional cover.
        wofs = wofs_classify(ds, clean_mask=clear_mask[idx], mosaic=True)
        clear_mask[idx] = False if wofs.wofs.values[0] == 1 else clear_mask[idx]
        fractional_cover = frac_coverage_classify(ds, clean_mask=clear_mask[idx], no_data=task.satellite.no_data_value)
        return fractional_cover

    fractional_cover = xr.concat(
        [
            _apply_band_math(single_pixel.isel(time=data_point, drop=True), data_point)
            for data_point in range(len(dates))
        ],
        dim='time')

    fractional_cover = fractional_cover.where(fractional_cover != task.satellite.no_data_value).isel(
        latitude=0, longitude=0)

    exclusion_list = []
    plot_measurements = [band for band in fractional_cover.data_vars if band not in exclusion_list]

    datasets = [fractional_cover[band].values.transpose() for band in plot_measurements] + [clear_mask]
    data_labels = [stringcase.titlecase("%{}".format(band)) for band in plot_measurements] + ["Clear"]
    titles = [
        'Bare Soil Percentage', 'Photosynthetic Vegetation Percentage', 'Non-Photosynthetic Vegetation Percentage',
        'Clear Mask'
    ]
    style = ['ro', 'go', 'bo', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path, dates=dates, datasets=datasets, data_labels=data_labels, titles=titles, style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #11
def update_data_cube_details(ingested_only=True):
    dataset_types = DatasetType.objects.using('agdc').filter(
        Q(definition__has_keys=['managed']) & Q(definition__has_keys=['measurements']))

    dc = DataAccessApi(config=os.environ.get('DATACUBE_CONFIG_PATH'))

    for dataset_type in dataset_types:
        ingestion_details, created = IngestionDetails.objects.get_or_create(
            dataset_type_ref=dataset_type.id,
            product=dataset_type.name,
            platform=dataset_type.metadata['platform']['code'])
        ingestion_details.update_with_query_metadata(dc.get_datacube_metadata(dataset_type.name))

    dc.close()
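
Note that the ingested_only flag is accepted but never used in either variant of this function. This variant reads the config path from the DATACUBE_CONFIG_PATH environment variable rather than hard-coding it as Example #13 does; a hypothetical invocation (the path is made up):

# Hypothetical: point at a datacube config, then refresh ingestion details.
os.environ.setdefault('DATACUBE_CONFIG_PATH', '/etc/datacube.conf')
update_data_cube_details()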
Example #12
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = TsmTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_stacked_datasets_by_extent(**parameters)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel)
    single_pixel = single_pixel.where(single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status("ERROR", "There is only a single acquisition for your parameter set.")
        return None

    wofs_data = task.get_processing_method()(single_pixel,
                                             clean_mask=clear_mask,
                                             no_data=task.satellite.no_data_value)
    wofs_data = \
        wofs_data.where(wofs_data != task.satellite.no_data_value)
    wofs_data = wofs_data.squeeze()
    tsm_data = \
        tsm(single_pixel, clean_mask=clear_mask, no_data=task.satellite.no_data_value)
    tsm_data = \
        tsm_data.where(tsm_data != task.satellite.no_data_value)\
        .squeeze().where(wofs_data.wofs.values == 1)

    # Remove NaNs to avoid errors and yield a nicer plot.
    water_non_nan_times = ~np.isnan(wofs_data.wofs.values)
    wofs_data = wofs_data.isel(time=water_non_nan_times)
    tsm_non_nan_times = ~np.isnan(tsm_data.tsm.values)
    tsm_data = tsm_data.isel(time=tsm_non_nan_times)

    datasets = [wofs_data.wofs.values.transpose().squeeze(), 
                tsm_data.tsm.values.transpose().squeeze()] + \
                [clear_mask.squeeze()]
    dates = [dates[water_non_nan_times], dates[tsm_non_nan_times]] + [dates]
    data_labels = ["Water/Non Water", "TSM (g/L)"] + ["Clear"]
    titles = ["Water/Non Water", "TSM Values"] + ["Clear Mask"]
    style = ['.', 'ro', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path, dates=dates, datasets=datasets, data_labels=data_labels, titles=titles, style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #13
def update_data_cube_details(ingested_only=True):
    dataset_types = DatasetType.objects.using('agdc').filter(
        Q(definition__has_keys=['managed'])
        & Q(definition__has_keys=['measurements']))

    dc = DataAccessApi(config='/home/' + settings.LOCAL_USER +
                       '/Datacube/data_cube_ui/config/.datacube.conf')

    for dataset_type in dataset_types:
        ingestion_details, created = IngestionDetails.objects.get_or_create(
            dataset_type_ref=dataset_type.id,
            product=dataset_type.name,
            platform=dataset_type.metadata['platform']['code'])
        ingestion_details.update_with_query_metadata(
            dc.get_datacube_metadata(dataset_type.name))

    dc.close()
Example #14
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None
    """
    task = SlipTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)

    acquisitions = dc.list_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if len(acquisitions) < task.baseline_length + 1:
        task.complete = True
        task.update_status(
            "ERROR",
            "There are an insufficient number of acquisitions for your baseline length."
        )
        return None

    validation_parameters = {**parameters}
    validation_parameters.pop('time')
    validation_parameters.pop('measurements')
    validation_parameters.update({
        'product': 'terra_aster_gdm_' + task.area_id,
        'platform': 'TERRA'
    })
    if len(dc.list_acquisition_dates(**validation_parameters)) < 1:
        task.complete = True
        task.update_status(
            "ERROR", "There is no elevation data for this parameter set.")
        return None

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #15
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None
    """
    task = NdviAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)

    acquisitions = dc.list_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    # The actual acquisition exists; let's try the baseline:
    validation_params = {**parameters}
    # The baseline is the five years immediately preceding the analysis start.
    validation_params.update({
        'time': (task.time_start.replace(year=task.time_start.year - 5),
                 task.time_start - timedelta(microseconds=1))
    })
    acquisitions = dc.list_acquisition_dates(**validation_params)

    # The list/map/int chain is required to cast each baseline month to int - grouping won't work if they're strings.
    grouped_dates = group_datetimes_by_month(
        acquisitions,
        months=list(map(int, task.baseline_selection.split(","))))

    if check_cancel_task(self, task): return

    if not grouped_dates:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #16
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = BandMathTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_dataset_by_extent(**parameters).isel(latitude=0,
                                                               longitude=0)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel)
    single_pixel = single_pixel.where(
        single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status(
            "ERROR",
            "There is only a single acquisition for your parameter set.")
        return None

    def _apply_band_math(dataset):
        # TODO: apply your band math here!
        return (dataset.nir - dataset.red) / (dataset.nir + dataset.red)

    datasets = [_apply_band_math(single_pixel).values.transpose()
                ] + [clear_mask]
    data_labels = ["Band Math Result"] + ["Clear"]
    titles = ["Band Math"] + ["Clear Mask"]
    style = ['ro', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path,
                   dates=dates,
                   datasets=datasets,
                   data_labels=data_labels,
                   titles=titles,
                   style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #17
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = CoastalChangeTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    validation_params = dict(parameters)
    # verify that both the start and end year have acquisitions
    for year in parameters['time']:
        validation_params.update({'time': (year, year.replace(year=year.year + 1))})
        acquisitions = dc.list_acquisition_dates(**validation_params)
        if len(acquisitions) < 1:
            task.complete = True
            task.update_status("ERROR", "There must be at least one acquisition in both the start and ending year.")
            return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'], parameters['measurements']):
        parameters['measurements'] = ['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa']

    dc.close()
    return parameters
Example #18
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = SpectralIndicesTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    # Validate for any number of criteria here - number of acquisitions, etc.
    acquisitions = dc.list_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR", "There are no acquistions for this parameter set.")
        return None

    if not task.compositor.is_iterative() and (task.time_end - task.time_start).days > 367:
        task.complete = True
        task.update_status("ERROR", "Median pixel operations are only supported for single year time periods.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'], parameters['measurements']):
        parameters['measurements'] = ['blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa']

    dc.close()
    return parameters
Example #19
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = CloudCoverageTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    # Validate for any number of criteria here - number of acquisitions, etc.
    acquisitions = dc.list_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR", "There are no acquistions for this parameter set.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'], parameters['measurements']):
        parameters['measurements'] = ['blue', 'green', 'red', 'pixel_qa']

    dc.close()
    return parameters
Example #20
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = WaterDetectionTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR", "There are no acquistions for this parameter set.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['products'][0], parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model.".
            format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #21
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = SlipTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    acquisitions = dc.list_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if len(acquisitions) < task.baseline_length + 1:
        task.complete = True
        task.update_status(
            "ERROR",
            "There are an insufficient number of acquisitions for your baseline length."
        )
        return None

    validation_parameters = {**parameters}
    validation_parameters.pop('time')
    validation_parameters.pop('measurements')
    validation_parameters.update({
        'product': 'terra_aster_gdm_' + task.area_id,
        'platform': 'TERRA'
    })
    if len(dc.list_acquisition_dates(**validation_parameters)) < 1:
        task.complete = True
        task.update_status(
            "ERROR", "There is no elevation data for this parameter set.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'],
                                    parameters['measurements']):
        parameters['measurements'] = [
            'blue', 'green', 'red', 'nir', 'swir1', 'swir2', 'pixel_qa'
        ]

    dc.close()
    return parameters
Example #22
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = SpectralAnomalyTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)

    baseline_parameters = parameters.copy()
    baseline_parameters['time'] = parameters['baseline_time']
    baseline_acquisitions = dc.list_acquisition_dates(**baseline_parameters)

    analysis_parameters = parameters.copy()
    analysis_parameters['time'] = parameters['analysis_time']
    analysis_acquisitions = dc.list_acquisition_dates(**analysis_parameters)

    if len(baseline_acquisitions) < 1:
        task.complete = True
        task.update_status(
            "ERROR", "There are no acquisitions for this parameter set "
            "for the baseline time period.")
        return None

    if len(analysis_acquisitions) < 1:
        task.complete = True
        task.update_status(
            "ERROR", "There are no acquisitions for this parameter set "
            "for the analysis time period.")
        return None

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #23
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = CustomMosaicToolTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info(f"task.config_path: {task.config_path}")
    dc = DataAccessApi(config=task.config_path)

    # Validate for any number of criteria here - number of acquisitions, etc.
    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if task.animated_product.animation_id != "none" and not task.compositor.is_iterative():
        task.complete = True
        task.update_status(
            "ERROR",
            "Animations cannot be generated for median pixel operations.")
        return None

    if not (task.compositor.is_iterative() or task.pixel_drill_task) and (
            task.time_end - task.time_start).days > 367:
        task.complete = True
        task.update_status(
            "ERROR",
            "Median pixel operations are only supported for single year time periods."
        )
        return None

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['products'][0],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #24
def validate_parameters(parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None

    """
    task = AppNameTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)

    # Validate for any number of criteria here - number of acquisitions, etc.
    # TODO: if this is not a multi-sensor app, replace list_combined_acquisition_dates with list_acquisition_dates
    acquisitions = dc.list_combined_acquisition_dates(**parameters)

    # TODO: are there any additional validations that need to be done here?
    if len(acquisitions) < 1:
        task.complete = True
        task.update_status("ERROR",
                           "There are no acquistions for this parameter set.")
        return None

    if task.animated_product.animation_id != "none" and not task.compositor.is_iterative():
        task.complete = True
        task.update_status(
            "ERROR",
            "Animations cannot be generated for median pixel operations.")
        return None

    task.update_status("WAIT", "Validated parameters.")

    # TODO: Check that the measurements exist - replace ['products'][0] with ['products'] if this is not a multi-sensor app.
    if not dc.validate_measurements(parameters['products'][0],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #25
def validate_parameters(self, parameters, task_id=None):
    """Validate parameters generated by the parameter parsing task

    All validation should be done here - are there data restrictions?
    Combinations that aren't allowed? etc.

    Returns:
        parameter dict with all keyword args required to load data.
        -or-
        updates the task with ERROR and a message, returning None
    """
    task = CoastalChangeTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    dc = DataAccessApi(config=task.config_path)

    validation_params = dict(parameters)
    # verify that both the start and end year have acquisitions
    for year in parameters['time']:
        validation_params.update(
            {'time': (year, year.replace(year=year.year + 1))})
        acquisitions = dc.list_acquisition_dates(**validation_params)
        if len(acquisitions) < 1:
            task.complete = True
            task.update_status(
                "ERROR",
                "There must be at least one acquisition in both the start and ending year."
            )
            return None

    if check_cancel_task(self, task): return
    task.update_status("WAIT", "Validated parameters.")

    if not dc.validate_measurements(parameters['product'],
                                    parameters['measurements']):
        task.complete = True
        task.update_status(
            "ERROR",
            "The provided Satellite model measurements aren't valid for the product. Please check the measurements listed in the {} model."
            .format(task.satellite.name))
        return None

    dc.close()
    return parameters
Example #26
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk id to identify output products.
    **parameters is updated with time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = TsmTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1), max(time_ranges) + timedelta(microseconds=1))
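    # E.g. a single datetime t yields the range (t - 1us, t + 1us); in the
    # all-at-once branch, the endpoints span the entire time chunk.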

    times = list(
        map(_get_datetime_range_containing, time_chunk)
        if task.get_iterative() else [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    water_analysis = None
    tsm_analysis = None
    combined_data = None
    base_index = (task.get_chunk_size()['time'] if task.get_chunk_size()['time'] is not None else 1) * time_chunk_id
    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        data = dc.get_stacked_datasets_by_extent(**updated_params)

        if check_cancel_task(self, task): return

        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        clear_mask = task.satellite.get_clean_mask_func()(data)

        wofs_data = task.get_processing_method()(data,
                                                 clean_mask=clear_mask,
                                                 enforce_float64=True,
                                                 no_data=task.satellite.no_data_value)
        water_analysis = perform_timeseries_analysis(
            wofs_data, 'wofs', intermediate_product=water_analysis, no_data=task.satellite.no_data_value)

        clear_mask[(data.swir2.values > 100) | (wofs_data.wofs.values == 0)] = False
        tsm_data = tsm(data, clean_mask=clear_mask, no_data=task.satellite.no_data_value)
        tsm_analysis = perform_timeseries_analysis(
            tsm_data, 'tsm', intermediate_product=tsm_analysis, no_data=task.satellite.no_data_value)

        if check_cancel_task(self, task): return

        combined_data = tsm_analysis
        combined_data['wofs'] = water_analysis.total_data
        combined_data['wofs_total_clean'] = water_analysis.total_clean

        metadata = task.metadata_from_dataset(metadata, tsm_data, clear_mask, updated_params)
        if task.animated_product.animation_id != "none":
            path = os.path.join(task.get_temp_path(),
                                "animation_{}_{}.nc".format(str(geo_chunk_id), str(base_index + time_index)))
            animated_data = tsm_data.isel(
                time=0, drop=True) if task.animated_product.animation_id == "scene" else combined_data
            animated_data.to_netcdf(path)

        task.scenes_processed = F('scenes_processed') + 1
        task.save(update_fields=['scenes_processed'])
    if combined_data is None:
        return None
    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    combined_data.to_netcdf(path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
Example #27
def processing_task(task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk id to identify output products.
    **parameters is updated with time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """

    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = AppNameTask.objects.get(pk=task_id)

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    iteration_data = None
    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1),
                max(time_ranges) + timedelta(microseconds=1))

    times = list(
        map(_get_datetime_range_containing, time_chunk) if task.get_iterative()
        else [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    iteration_data = None
    base_index = (task.get_chunk_size()['time']
                  if task.get_chunk_size()['time'] is not None else 1) * time_chunk_id
    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        # TODO: If this is not a multi-sensor app, replace get_stacked_datasets_by_extent with get_dataset_by_extent
        data = dc.get_stacked_datasets_by_extent(**updated_params)
        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        # TODO: Replace anything here with your processing - do you need to create additional masks? Apply bandmaths? etc.
        clear_mask = (create_cfmask_clean_mask(data.cf_mask) if 'cf_mask' in data
                      else create_bit_mask(data.pixel_qa, [1, 2]))
        add_timestamp_data_to_xr(data)

        metadata = task.metadata_from_dataset(metadata, data, clear_mask,
                                              updated_params)

        # TODO: Make sure you're producing everything required for your algorithm.
        iteration_data = task.get_processing_method()(
            data, clean_mask=clear_mask, intermediate_product=iteration_data)

        # TODO: If there is no animation you can remove this block. Otherwise, save off the data that you need.
        if task.animated_product.animation_id != "none":
            path = os.path.join(
                task.get_temp_path(),
                "animation_{}_{}.nc".format(str(geo_chunk_id),
                                            str(base_index + time_index)))
            if task.animated_product.animation_id == "scene":
                # Need to clear out all the metadata.
                clear_attrs(data)
                # Can't reindex on time, so drop it instead.
                data.isel(time=0).drop('time').to_netcdf(path)
            elif task.animated_product.animation_id == "cumulative":
                iteration_data.to_netcdf(path)

        task.scenes_processed = F('scenes_processed') + 1
        task.save()

    if iteration_data is None:
        return None

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    iteration_data.to_netcdf(path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
Example #28
def processing_task(task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk id to identify output products.
    **parameters is updated with time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """

    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = FractionalCoverTask.objects.get(pk=task_id)

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    iteration_data = None
    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1),
                max(time_ranges) + timedelta(microseconds=1))

    times = list(
        map(_get_datetime_range_containing, time_chunk) if task.get_iterative()
        else [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    iteration_data = None
    base_index = (task.get_chunk_size()['time']
                  if task.get_chunk_size()['time'] is not None else 1) * time_chunk_id
    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        data = dc.get_stacked_datasets_by_extent(**updated_params)
        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        clear_mask = (create_cfmask_clean_mask(data.cf_mask) if 'cf_mask' in data
                      else create_bit_mask(data.pixel_qa, [1, 2]))
        add_timestamp_data_to_xr(data)

        metadata = task.metadata_from_dataset(metadata, data, clear_mask,
                                              updated_params)

        iteration_data = task.get_processing_method()(
            data, clean_mask=clear_mask, intermediate_product=iteration_data)

        task.scenes_processed = F('scenes_processed') + 1
        task.save()

    if iteration_data is None:
        return None

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    iteration_data.to_netcdf(path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }
Example #29
def pixel_drill(task_id=None):
    parameters = parse_parameters_from_task(task_id=task_id)
    validate_parameters(parameters, task_id=task_id)
    task = SpectralIndicesTask.objects.get(pk=task_id)

    if task.status == "ERROR":
        return None

    dc = DataAccessApi(config=task.config_path)
    single_pixel = dc.get_dataset_by_extent(**parameters).isel(latitude=0,
                                                               longitude=0)
    clear_mask = task.satellite.get_clean_mask_func()(single_pixel)
    single_pixel = single_pixel.where(
        single_pixel != task.satellite.no_data_value)

    dates = single_pixel.time.values
    if len(dates) < 2:
        task.update_status(
            "ERROR",
            "There is only a single acquisition for your parameter set.")
        return None

    # Spectral index band math, written out here so the loop below is self-contained.
    spectral_indices_map = {
        'ndvi': lambda ds: (ds.nir - ds.red) / (ds.nir + ds.red),
        'evi': lambda ds: 2.5 * (ds.nir - ds.red) / (ds.nir + 6 * ds.red - 7.5 * ds.blue + 1),
        'savi': lambda ds: (ds.nir - ds.red) / (ds.nir + ds.red + 0.5) * (1.5),
        'nbr': lambda ds: (ds.nir - ds.swir2) / (ds.nir + ds.swir2),
        'nbr2': lambda ds: (ds.swir1 - ds.swir2) / (ds.swir1 + ds.swir2),
        'ndwi': lambda ds: (ds.nir - ds.swir1) / (ds.nir + ds.swir1),
        'ndbi': lambda ds: (ds.swir1 - ds.nir) / (ds.nir + ds.swir1),
    }

    for spectral_index in spectral_indices_map:
        single_pixel[spectral_index] = spectral_indices_map[spectral_index](
            single_pixel)

    exclusion_list = task.satellite.get_measurements()
    plot_measurements = [
        band for band in single_pixel.data_vars if band not in exclusion_list
    ]

    datasets = [
        single_pixel[band].values.transpose() for band in plot_measurements
    ] + [clear_mask]
    data_labels = [
        stringcase.uppercase("{}".format(band)) for band in plot_measurements
    ] + ["Clear"]
    titles = [
        stringcase.uppercase("{}".format(band)) for band in plot_measurements
    ] + ["Clear Mask"]
    style = ['ro', 'go', 'bo', 'co', 'mo', 'yo', 'ko', '.']

    task.plot_path = os.path.join(task.get_result_path(), "plot_path.png")
    create_2d_plot(task.plot_path,
                   dates=dates,
                   datasets=datasets,
                   data_labels=data_labels,
                   titles=titles,
                   style=style)

    task.complete = True
    task.update_status("OK", "Done processing pixel drill.")
Example #30
def processing_task(self,
                    task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk id to identify output products.
    **parameters is updated with time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm
    is iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = SpectralIndicesTask.objects.get(pk=task_id)
    if check_cancel_task(self, task): return

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    times = list(
        map(_get_datetime_range_containing, time_chunk) if task.get_iterative()
        else [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])
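    # Note: _get_datetime_range_containing is assumed to be defined at module
    # level in this app; Examples #26-28 define the same helper inline.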
    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    iteration_data = None
    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        data = dc.get_dataset_by_extent(**updated_params)

        if check_cancel_task(self, task): return

        if data is None:
            logger.info("Empty chunk.")
            continue
        if 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        clear_mask = task.satellite.get_clean_mask_func()(data)
        add_timestamp_data_to_xr(data)

        metadata = task.metadata_from_dataset(metadata, data, clear_mask,
                                              updated_params)

        iteration_data = task.get_processing_method()(
            data,
            clean_mask=clear_mask,
            intermediate_product=iteration_data,
            no_data=task.satellite.no_data_value,
            reverse_time=task.get_reverse_time())

        if check_cancel_task(self, task): return

        task.scenes_processed = F('scenes_processed') + 1
        task.save(update_fields=['scenes_processed'])
    if iteration_data is None:
        return None
    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    export_xarray_to_netcdf(iteration_data, path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {
        'geo_chunk_id': geo_chunk_id,
        'time_chunk_id': time_chunk_id
    }