def perform_task_chunking(parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses functions provided by the task model to create a group of parameter
    sets that make up the arg.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic and time ranges
    """
    if parameters is None:
        return None

    task = SlipTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)
    dates = dc.list_acquisition_dates(**parameters)
    task_chunk_sizing = task.get_chunk_size()

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    time_chunks = generate_baseline(dates, task.baseline_length)
    logger.info("Time chunks: {}, Geo chunks: {}".format(len(time_chunks), len(geographic_chunks)))

    dc.close()
    task.update_status("WAIT", "Chunked parameter set.")
    return {'parameters': parameters, 'geographic_chunks': geographic_chunks, 'time_chunks': time_chunks}
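# --- Illustrative sketch, not part of the task module ---
# A hypothetical, minimal version of what create_geographic_chunks does here:
# split the requested latitude extent into strips no wider than the task's
# geographic chunk size, keeping the full longitude range for each strip. The
# real helper lives in the project's chunking utilities and may chunk both
# dimensions; this stand-in only illustrates the idea.
def _sketch_create_geographic_chunks(longitude, latitude, geographic_chunk_size):
    lat_min, lat_max = latitude
    chunks = []
    lower = lat_min
    while lower < lat_max:
        upper = min(lower + geographic_chunk_size, lat_max)
        chunks.append({'longitude': longitude, 'latitude': (lower, upper)})
        lower = upper
    return chunks

# Example: a 1 degree tall extent with a 0.25 degree chunk size -> 4 strips.
_example_chunks = _sketch_create_geographic_chunks(
    longitude=(35.0, 36.0), latitude=(0.0, 1.0), geographic_chunk_size=0.25)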
def perform_task_chunking(parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses functions provided by the task model to create a group of parameter
    sets that make up the arg.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic and time ranges
    """
    if parameters is None:
        return None

    task = NdviAnomalyTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)
    dates = dc.list_acquisition_dates(**parameters)
    task_chunk_sizing = task.get_chunk_size()

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    grouped_dates_params = {**parameters}
    grouped_dates_params.update({
        'time': (datetime(1000, 1, 1), task.time_start - timedelta(microseconds=1))
    })
    acquisitions = dc.list_acquisition_dates(**grouped_dates_params)
    grouped_dates = group_datetimes_by_month(
        acquisitions, months=list(map(int, task.baseline_selection.split(","))))

    # create a single monolithic list of all acq. dates - there should be only one.
    time_chunks = []
    for date_group in grouped_dates:
        time_chunks.extend(grouped_dates[date_group])

    # time chunks cast to a single-element list, essentially.
    time_chunks = [time_chunks]
    logger.info("Time chunks: {}, Geo chunks: {}".format(len(time_chunks), len(geographic_chunks)))

    dc.close()
    task.update_status("WAIT", "Chunked parameter set.")
    return {
        'parameters': parameters,
        'geographic_chunks': geographic_chunks,
        'time_chunks': time_chunks
    }
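# --- Illustrative sketch, not part of the task module ---
# Assumed behavior of group_datetimes_by_month: bucket acquisition dates by
# calendar month, keeping only the requested baseline months. The NDVI anomaly
# task above then flattens every bucket into one monolithic time chunk so all
# baseline scenes are loaded together. The helper below is a hypothetical
# stand-in for demonstration only.
from collections import defaultdict
from datetime import datetime

def _sketch_group_datetimes_by_month(dates, months):
    grouped = defaultdict(list)
    for date in dates:
        if date.month in months:
            grouped[date.month].append(date)
    return grouped

# Example: two January scenes and one March scene with a "1,3" baseline selection.
_acquisitions = [datetime(2015, 1, 5), datetime(2016, 1, 9), datetime(2016, 3, 2)]
_grouped = _sketch_group_datetimes_by_month(_acquisitions, months=[1, 3])
_single_chunk = [[d for group in _grouped.values() for d in group]]  # one chunk, three dates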
def perform_task_chunking(parameters, task_id=None):
    """Chunk parameter sets into more manageable sizes

    Uses functions provided by the task model to create a group of parameter
    sets that make up the arg.

    Args:
        parameters: parameter stream containing all kwargs to load data

    Returns:
        parameters with a list of geographic and time ranges
    """
    if parameters is None:
        return None

    task = CoastalChangeTask.objects.get(pk=task_id)
    dc = DataAccessApi(config=task.config_path)
    dates = dc.list_acquisition_dates(**parameters)
    task_chunk_sizing = task.get_chunk_size()

    geographic_chunks = create_geographic_chunks(
        longitude=parameters['longitude'],
        latitude=parameters['latitude'],
        geographic_chunk_size=task_chunk_sizing['geographic'])

    grouped_dates = group_datetimes_by_year(dates)

    # we need to pair the first year with each subsequent year.
    time_chunks = None
    if task.animated_product.animation_id == 'none':
        # first and last year only
        time_chunks = [[grouped_dates[task.time_start], grouped_dates[task.time_end]]]
    else:
        initial_year = grouped_dates.pop(task.time_start)
        time_chunks = [[initial_year, grouped_dates[year]] for year in grouped_dates]

    logger.info("Time chunks: {}, Geo chunks: {}".format(len(time_chunks), len(geographic_chunks)))

    dc.close()
    task.update_status("WAIT", "Chunked parameter set.")
    return {
        'parameters': parameters,
        'geographic_chunks': geographic_chunks,
        'time_chunks': time_chunks
    }
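# --- Illustrative sketch, not part of the task module ---
# Assumed shape of group_datetimes_by_year's output: a dict keyed by the start
# of each year, mapping to that year's acquisition dates, so task.time_start
# and task.time_end can be used as keys directly. The pairing below mirrors the
# animated case above: the starting year is matched against every later year.
# Hypothetical stand-in for demonstration only.
from datetime import datetime

def _sketch_group_datetimes_by_year(dates):
    grouped = {}
    for date in dates:
        grouped.setdefault(datetime(date.year, 1, 1), []).append(date)
    return grouped

# Example: pair the 2000 baseline year with each subsequent year.
_dates = [datetime(2000, 2, 1), datetime(2001, 3, 1), datetime(2002, 4, 1)]
_grouped_years = _sketch_group_datetimes_by_year(_dates)
_initial_year = _grouped_years.pop(datetime(2000, 1, 1))
_pairs = [[_initial_year, _grouped_years[year]] for year in _grouped_years]
# -> [[[2000 dates], [2001 dates]], [[2000 dates], [2002 dates]]]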