Example #1
import yaml
from datacube import Datacube
# The imports below assume the open-source datacube-stats package layout.
from datacube_stats.main import StatsApp
from datacube_stats.utils import pickle_stream

def save_tasks():
    # Create a pickle file with the location of every datacube observation
    # specified in the YAML configuration file.
    with open('stats_config.yaml') as fl:
        config = yaml.safe_load(fl)  # yaml.load() without a Loader is deprecated/unsafe

    print(yaml.dump(config, indent=4))

    print('generating tasks')
    dc = Datacube(app='api-example', config='cambodia.conf')
    app = StatsApp(config, index=dc.index)
    pickle_stream(app.generate_tasks(dc.index), 'task.pickle')
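
pickle_stream() and its counterpart unpickle_stream() come from the datacube-stats utilities. As a mental model, here is a minimal sketch of a streaming pickle pair, assuming (as these examples suggest) that tasks are written back-to-back so they can later be read lazily one at a time; the package's actual implementation may differ:

import pickle

def pickle_stream(items, filename):
    # Sketch: dump each item as its own pickle record in one file; return the count.
    count = 0
    with open(filename, 'wb') as fl:
        for item in items:
            pickle.dump(item, fl, protocol=pickle.HIGHEST_PROTOCOL)
            count += 1
    return count

def unpickle_stream(filename):
    # Sketch: lazily yield one item at a time until end-of-file.
    with open(filename, 'rb') as fl:
        while True:
            try:
                yield pickle.load(fl)
            except EOFError:
                return

Streaming the tasks this way keeps memory usage flat even when the configuration matches a very large number of observations.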
Example #2
import xarray as xr
from datacube_stats.utils import pickle_stream, unpickle_stream

def prune_tasks(quartile):
    # 'Prune' the pickle file produced by save_tasks() against a list of
    # dates, writing the surviving tasks to a new pickle file.

    # Load the NetCDF file holding the list of dates for this quartile.
    spei_q_dates = xr.open_dataset(f"spei_q{quartile}_dates.nc")

    # transform_task() is user code defined elsewhere (see the sketch after
    # this example); it returns None for tasks that should be dropped.
    pruned = (transform_task(task, spei_q_dates, quartile)
              for task in unpickle_stream('task.pickle'))
    pruned = (task for task in pruned if task is not None)

    pickle_stream(pruned, f'task_q{quartile}.pickle')
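
transform_task() is not shown on this page; it is the user's pruning predicate. A hypothetical sketch of what it could look like, assuming each task exposes a single timestamp as task.time and the NetCDF file stores its dates in a time coordinate (both names are assumptions, not the real API):

import numpy as np

def transform_task(task, spei_q_dates, quartile):
    # Hypothetical pruning logic: keep the task if its date appears in the
    # quartile's date list, otherwise return None so the caller's
    # `if task is not None` filter discards it.
    # `quartile` is unused here but kept to match the call site above.
    keep = set(spei_q_dates['time'].values.astype('datetime64[D]'))
    if np.datetime64(task.time, 'D') in keep:
        return task
    return None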
Example #3
    def save_tasks_to_file(self, filename, index):
        # Generate the tasks for every configured output product and stream
        # them to `filename`, logging how many tasks were saved.
        _LOG.debug('Saving tasks to %s.', filename)
        output_products = self.configure_outputs(index)

        tasks = self.generate_tasks(index, output_products)
        num_saved = pickle_stream(tasks, filename)
        _LOG.debug('Successfully saved %s tasks to %s.', num_saved, filename)
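
Example #3 is the library-side counterpart of Example #1: it configures the output products for an index, generates the tasks, and streams them to a pickle file. A hedged usage sketch, assuming app and dc are the StatsApp and Datacube from Example #1:

# Hypothetical call, reusing `app` and `dc` from Example #1.
app.save_tasks_to_file('task.pickle', dc.index)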