def processing_task(task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm is
    iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = NdviAnomalyTask.objects.get(pk=task_id)

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1), max(time_ranges) + timedelta(microseconds=1))

    base_scene_time_range = parameters['time']

    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)

    # Generate the baseline data - one time slice at a time.
    full_dataset = []
    for time_index, time in enumerate(time_chunk):
        updated_params.update({'time': _get_datetime_range_containing(time)})
        data = dc.get_dataset_by_extent(**updated_params)
        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue
        full_dataset.append(data.copy(deep=True))

    # Load the selected scene and mosaic it in case we got two scenes (handles scene boundaries/overlapping data).
    updated_params.update({'time': base_scene_time_range})
    selected_scene = dc.get_dataset_by_extent(**updated_params)

    if len(full_dataset) == 0 or selected_scene is None or 'time' not in selected_scene:
        return None

    # Concat the individual slices over time, then compute metadata and the mosaic.
    baseline_data = xr.concat(full_dataset, 'time')
    baseline_clear_mask = create_cfmask_clean_mask(
        baseline_data.cf_mask) if 'cf_mask' in baseline_data else create_bit_mask(baseline_data.pixel_qa, [1, 2])
    metadata = task.metadata_from_dataset(metadata, baseline_data, baseline_clear_mask, parameters)

    selected_scene_clear_mask = create_cfmask_clean_mask(
        selected_scene.cf_mask) if 'cf_mask' in selected_scene else create_bit_mask(selected_scene.pixel_qa, [1, 2])
    metadata = task.metadata_from_dataset(metadata, selected_scene, selected_scene_clear_mask, parameters)

    selected_scene = task.get_processing_method()(
        selected_scene, clean_mask=selected_scene_clear_mask, intermediate_product=None)
    # The clear mask needs to be regenerated from the mosaic now.
    selected_scene_clear_mask = create_cfmask_clean_mask(
        selected_scene.cf_mask) if 'cf_mask' in selected_scene else create_bit_mask(selected_scene.pixel_qa, [1, 2])

    ndvi_products = compute_ndvi_anomaly(
        baseline_data,
        selected_scene,
        baseline_clear_mask=baseline_clear_mask,
        selected_scene_clear_mask=selected_scene_clear_mask)

    full_product = xr.merge([ndvi_products, selected_scene])

    task.scenes_processed = F('scenes_processed') + 1
    task.save()

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    full_product.to_netcdf(path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
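
# Hedged sketch (synthetic data, not part of the task above): the baseline loop collects one
# dataset per acquisition and stitches them back together along 'time' with xr.concat, which is
# plain xarray behavior. Nothing here touches the Data Cube API; all values are made up.
#
#     import numpy as np
#     import xarray as xr
#
#     slices = [xr.Dataset({'nir': (('y', 'x'), np.random.rand(2, 2))}, coords={'time': t})
#               for t in range(3)]
#     baseline = xr.concat(slices, 'time')   # scalar 'time' coords are promoted to a dimension
#     assert baseline.sizes['time'] == 3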
def processing_task(task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm is
    iterative or whether all data needs to be loaded at once.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = BandMathTask.objects.get(pk=task_id)

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    iteration_data = None
    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1), max(time_ranges) + timedelta(microseconds=1))

    # Iterative tasks load one acquisition at a time; otherwise a single range covers the whole chunk.
    times = list(
        map(_get_datetime_range_containing, time_chunk)
        if task.get_iterative() else [_get_datetime_range_containing(time_chunk[0], time_chunk[-1])])

    dc = DataAccessApi(config=task.config_path)
    updated_params = parameters
    updated_params.update(geographic_chunk)
    #updated_params.update({'products': parameters['']})

    # Absolute index of the first scene in this time chunk.
    base_index = (task.get_chunk_size()['time'] if task.get_chunk_size()['time'] is not None else 1) * time_chunk_id

    for time_index, time in enumerate(times):
        updated_params.update({'time': time})
        data = dc.get_dataset_by_extent(**updated_params)
        if data is None or 'time' not in data:
            logger.info("Invalid chunk.")
            continue

        clear_mask = create_cfmask_clean_mask(data.cf_mask) if 'cf_mask' in data else create_bit_mask(
            data.pixel_qa, [1, 2])
        add_timestamp_data_to_xr(data)

        metadata = task.metadata_from_dataset(metadata, data, clear_mask, updated_params)

        iteration_data = task.get_processing_method()(data, clean_mask=clear_mask, intermediate_product=iteration_data)

        task.scenes_processed = F('scenes_processed') + 1
        task.save()

    if iteration_data is None:
        return None

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    iteration_data.to_netcdf(path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
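
# Hedged sketch (hypothetical helper name, standalone datetimes): the two shapes `times` can take
# in the band math task above. An iterative task pads each acquisition into its own (start, end)
# window so scenes are composited one at a time; a non-iterative task loads a single window
# spanning the whole chunk.
#
#     from datetime import datetime, timedelta
#
#     def _containing(*times):
#         return (min(times) - timedelta(microseconds=1), max(times) + timedelta(microseconds=1))
#
#     acquisitions = [datetime(2017, 1, 1), datetime(2017, 2, 2), datetime(2017, 3, 3)]
#     iterative_windows = list(map(_containing, acquisitions))            # one window per scene
#     single_window = [_containing(acquisitions[0], acquisitions[-1])]    # one window for the chunk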
def processing_task(task_id=None,
                    geo_chunk_id=None,
                    time_chunk_id=None,
                    geographic_chunk=None,
                    time_chunk=None,
                    **parameters):
    """Process a parameter set and save the results to disk.

    Uses the geographic and time chunk ids to identify output products.
    **parameters is updated with the time and geographic ranges, then used to load data.
    The task model holds the iterative property that signifies whether the algorithm is
    iterative or whether all data needs to be loaded at once.
    Computes a single SLIP baseline comparison - returns a slip mask and mosaic.

    Args:
        task_id, geo_chunk_id, time_chunk_id: identification for the main task and what chunk this is processing
        geographic_chunk: range of latitude and longitude to load - dict with keys latitude, longitude
        time_chunk: list of acquisition dates
        parameters: all required kwargs to load data.

    Returns:
        path to the output product, metadata dict, and a dict containing the geo/time ids
    """
    chunk_id = "_".join([str(geo_chunk_id), str(time_chunk_id)])
    task = SlipTask.objects.get(pk=task_id)

    logger.info("Starting chunk: " + chunk_id)
    if not os.path.exists(task.get_temp_path()):
        return None

    metadata = {}

    def _get_datetime_range_containing(*time_ranges):
        return (min(time_ranges) - timedelta(microseconds=1), max(time_ranges) + timedelta(microseconds=1))

    time_range = _get_datetime_range_containing(time_chunk[0], time_chunk[-1])

    dc = DataAccessApi(config=task.config_path)
    updated_params = {**parameters}
    updated_params.update(geographic_chunk)
    updated_params.update({'time': time_range})
    data = dc.get_dataset_by_extent(**updated_params)

    # Grab the DEM data as well - same extent, but a fixed product/platform and no time or measurements filter.
    dem_parameters = {**updated_params}
    dem_parameters.update({'product': 'terra_aster_gdm_' + task.area_id, 'platform': 'TERRA'})
    dem_parameters.pop('time')
    dem_parameters.pop('measurements')
    dem_data = dc.get_dataset_by_extent(**dem_parameters)

    if data is None or dem_data is None or 'time' not in data or 'time' not in dem_data:
        return None

    # Target data is the most recent acquisition, with the baseline being everything else.
    target_data = xr.concat([data.isel(time=-1)], 'time')
    baseline_data = data.isel(time=slice(None, -1))

    target_clear_mask = create_cfmask_clean_mask(
        target_data.cf_mask) if 'cf_mask' in target_data else create_bit_mask(target_data.pixel_qa, [1, 2])
    baseline_clear_mask = create_cfmask_clean_mask(
        baseline_data.cf_mask) if 'cf_mask' in baseline_data else create_bit_mask(baseline_data.pixel_qa, [1, 2])

    combined_baseline = task.get_processing_method()(baseline_data, clean_mask=baseline_clear_mask)
    target_data = create_mosaic(target_data, clean_mask=target_clear_mask)

    slip_data = compute_slip(combined_baseline, target_data, dem_data)
    target_data['slip'] = slip_data

    metadata = task.metadata_from_dataset(
        metadata, target_data, target_clear_mask, updated_params, time=data.time.values.astype('M8[ms]').tolist()[-1])

    task.scenes_processed = F('scenes_processed') + 1
    task.save()

    path = os.path.join(task.get_temp_path(), chunk_id + ".nc")
    clear_attrs(target_data)
    target_data.to_netcdf(path)
    dc.close()
    logger.info("Done with chunk: " + chunk_id)
    return path, metadata, {'geo_chunk_id': geo_chunk_id, 'time_chunk_id': time_chunk_id}
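
# Hedged sketch (synthetic data, not from the SLIP task above): illustrates the target/baseline
# split used above - the most recent acquisition is kept as a one-element time slice (the scalar
# 'time' coordinate from isel is re-promoted to a dimension by xr.concat), while every earlier
# acquisition forms the baseline.
#
#     import numpy as np
#     import xarray as xr
#
#     ds = xr.Dataset({'red': (('time', 'y', 'x'), np.random.rand(4, 2, 2))},
#                     coords={'time': np.arange(4)})
#     target = xr.concat([ds.isel(time=-1)], 'time')    # newest scene, time dim preserved
#     baseline = ds.isel(time=slice(None, -1))          # all older scenes
#     assert target.sizes['time'] == 1 and baseline.sizes['time'] == 3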