def main(index, config, tasks, executor, queue_size, **kwargs):
    click.echo('Starting stacking utility...')
    task_func = partial(do_stack_task, config)
    process_func = partial(process_result, index) if config['index_datasets'] else do_nothing
    task_app.run_tasks(tasks, executor, task_func, process_func, queue_size)
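# The variant above falls back to a `do_nothing` callable rather than None when
# `index_datasets` is disabled. That helper is not shown in this excerpt; a minimal
# sketch of what such a no-op handler might look like (an assumption, not the
# library's definition) is:

def do_nothing(result):
    # No-op result handler: accept the task result and discard it.
    pass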
def fixer(index, config, tasks, executor, queue_size, **kwargs):
    """This script rewrites unstacked dataset files to correct their NetCDF metadata."""
    click.echo('Starting fixer utility...')
    task_func = partial(do_fixer_task, config)
    process_func = partial(process_result, index) if config['index_datasets'] else None
    task_app.run_tasks(tasks, executor, task_func, process_func, queue_size)
def main(index, config, tasks, executor, queue_size, **kwargs): """This script creates NetCDF files containing an entire year of tiles in the same file.""" click.echo('Starting stacking utility...') task_func = partial(do_stack_task, config) process_func = partial(process_result, index) if config['index_datasets'] else None task_app.run_tasks(tasks, executor, task_func, process_func, queue_size)
def full(index, config, tasks, executor, queue_size, **kwargs):
    """Create ncml files for the full time depth of the product

    e.g. datacube-ncml full <app_config_yaml>
    """
    click.echo('Starting datacube ncml utility...')
    task_func = partial(do_ncml_task, config)
    task_app.run_tasks(tasks, executor, task_func, None, queue_size)
def main(index, config, tasks, executor, queue_size, **kwargs): """Store datasets into NetCDF files containing an entire year in the same file. - Uses the same configuration format as the `ingest` tool. - However, does not create new datasets, but instead updates dataset locations then archives the original location. """ click.echo('Starting stacking utility...') task_func = partial(do_stack_task, config) process_func = partial(process_result, index) if config['index_datasets'] else None task_app.run_tasks(tasks, executor, task_func, process_func, queue_size)
def update(index, config, tasks, executor, queue_size, **kwargs):
    """Update a single-year ncml file

    e.g. datacube-ncml update <app_config_yaml> 1996

    This can be used to update an existing ncml file created with `nest` when new data is added.
    """
    click.echo('Starting datacube ncml utility...')
    task_func = partial(do_ncml_task, config)
    task_app.run_tasks(tasks, executor, task_func, None, queue_size)
def nest(index, config, tasks, executor, queue_size, **kwargs):
    """Create ncml files for the full time depth, with nested ncml files covering the given years

    e.g. datacube-ncml nest <app_config_yaml> 2016 2017

    This will refer to the actual files (hopefully stacked), and make ncml files for the
    given (i.e. unstacked) years. Use the `update` command when new data is added to a
    year, without having to rerun for the entire time depth.
    """
    click.echo('Starting datacube ncml utility...')
    task_func = partial(do_ncml_task, config)
    task_app.run_tasks(tasks, executor, task_func, None, queue_size)
def test_run_tasks():
    executor = datacube.executor.SerialExecutor()
    tasks = ({'val': i} for i in range(3))
    tasks_to_do = list(range(3))

    def task_func(task):
        x = task['val']
        return (x, x**2)

    def process_result_func(result):
        assert result[0]**2 == result[1]
        tasks_to_do.remove(result[0])

    run_tasks(tasks, executor, task_func, process_result_func)

    assert not tasks_to_do
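# The test above exercises the run_tasks contract: every task goes through task_func,
# and every result is handed to process_result_func. A minimal serial stand-in for
# that contract (ignoring the executor and the bounded result queue the real
# run_tasks manages) might look like this; run_tasks_serial is an illustrative name,
# not part of the library.

def run_tasks_serial(tasks, task_func, process_result_func=None):
    for task in tasks:
        result = task_func(task)
        if process_result_func is not None:
            process_result_func(result)

# The same assertions as the test above hold against this stand-in.
remaining = list(range(3))
run_tasks_serial(({'val': i} for i in range(3)),
                 lambda t: (t['val'], t['val'] ** 2),
                 lambda r: remaining.remove(r[0]))
assert not remaining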
def app_main(db_index, config, tasks, executor, **opts):
    """
    make_config => config
    config => make_tasks => tasks
    """
    from pickle import dumps

    unused(db_index, opts, config)

    click.echo('Using executor {}'.format(repr(executor)))
    task_runner = wrap_task(run_task, config['op'])
    click.echo('Task function size: {}'.format(len(dumps(task_runner))))

    run_tasks(tasks, executor, task_runner, queue_size=10)

    return 0
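# app_main logs len(dumps(task_runner)) because the task runner must be picklable
# before a distributed executor can ship it to worker processes. A small,
# self-contained check of that property, using a hypothetical module-level `scale`
# function in place of run_task and functools.partial loosely in place of wrap_task:

from functools import partial
from pickle import dumps, loads

def scale(factor, task):
    # Module-level function, so a partial over it stays picklable.
    return task['val'] * factor

runner = loads(dumps(partial(scale, 10)))   # round-trips: nothing captured is process-local
assert runner({'val': 4}) == 40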