Example 1
def run(index,
        dry_run: bool,
        tag: str,
        task_desc_file: str,
        qsub: QSubLauncher,
        runner: TaskRunner,
        *args, **kwargs):
    _LOG.info('Starting Fractional Cover processing...')
    _LOG.info('Tag: %r', tag)

    task_desc = serialise.load_structure(Path(task_desc_file), TaskDescription)
    config, tasks = task_app.load_tasks(task_desc.runtime_state.task_serialisation_path)

    if dry_run:
        # Only report which output files already exist; don't run any tasks.
        task_app.check_existing_files((task['filename'] for task in tasks))
        return 0

    task_func = partial(do_fc_task, config)
    process_func = partial(process_result, index)

    try:
        runner(task_desc, tasks, task_func, process_func)
        _LOG.info("Runner finished normally, triggering shutdown.")
    finally:
        runner.stop()
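
The two `partial` calls are the glue here: they bind the run-wide context (`config`, `index`) up front, so the runner only ever deals with single-argument callables. A minimal sketch of that pattern, using hypothetical `do_task`/`handle_result` stand-ins rather than the real `do_fc_task`/`process_result`:

from functools import partial

def do_task(config: dict, task: dict) -> dict:
    # Hypothetical stand-in for do_fc_task: run one task with the shared config.
    return {'filename': task['filename'], 'written_to': config['output_dir']}

def handle_result(index, result: dict) -> None:
    # Hypothetical stand-in for process_result: record the outcome against the index.
    print(index, result)

task_func = partial(do_task, {'output_dir': '/tmp/fc'})
process_func = partial(handle_result, 'fake-index')

process_func(task_func({'filename': 'LS8_FC_2015_-12_137.nc'}))
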
Example 2
def test_dump_load_task_structure(tmpdir):
    # Dump to JSON and reload, then check equality.

    d = Path(str(tmpdir))
    task_description = TaskDescription(
        type_="reproject",
        task_dt=datetime.datetime.utcnow(),
        events_path=d.joinpath('events'),
        logs_path=d.joinpath('logs'),
        parameters=DefaultJobParameters(
            query={'time': [2013, 2015]},
            source_products=['ls5_nbar_albers'],
            output_products=['ls5_nbar_waterman_butterfly'],
        ),
        # Task-app framework
        runtime_state=TaskAppState(
            config_path=Path('config.test.yaml'),
            task_serialisation_path=d.joinpath('generated-tasks.pickle'),
        )
    )

    serialised_file = d.joinpath('task_description.json')
    serialise.dump_structure(serialised_file, task_description)

    result = serialise.load_structure(serialised_file, expected_type=TaskDescription)

    assert result == task_description
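
The assertion only holds because `dump_structure` and `load_structure` are symmetric. The same round-trip idea, sketched with only the standard library (a hypothetical, much-reduced `Description` dataclass, not the real `TaskDescription`):

import dataclasses
import json
import tempfile
from pathlib import Path

@dataclasses.dataclass
class Description:
    # Hypothetical analogue of TaskDescription, reduced to two fields.
    type_: str
    logs_path: str

def dump_structure(path: Path, obj) -> None:
    path.write_text(json.dumps(dataclasses.asdict(obj)))

def load_structure(path: Path, expected_type):
    return expected_type(**json.loads(path.read_text()))

with tempfile.TemporaryDirectory() as tmpdir:
    f = Path(tmpdir).joinpath('description.json')
    original = Description(type_='reproject', logs_path='logs')
    dump_structure(f, original)
    assert load_structure(f, Description) == original
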
Example 3
def _make_config_and_description(index: Index, task_desc_path: Path) -> Tuple[dict, TaskDescription]:
    task_desc = serialise.load_structure(task_desc_path, TaskDescription)

    task_time: datetime = task_desc.task_dt
    app_config = task_desc.runtime_state.config_path

    config = paths.read_document(app_config)

    # TODO: This carries over the old behaviour of each load; it should probably be replaced with *tag*.
    config['task_timestamp'] = int(task_time.timestamp())
    config['app_config_file'] = Path(app_config)
    config = make_fc_config(index, config)

    return config, task_desc
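
`task_timestamp` collapses the task's datetime to integer epoch seconds. One caveat worth knowing: `.timestamp()` on a naive datetime (such as the `utcnow()` value used in the test above) is interpreted in local time, so a timezone-aware value is the safer input:

from datetime import datetime, timezone

task_time = datetime(2018, 3, 1, 12, 30, tzinfo=timezone.utc)
print(int(task_time.timestamp()))  # 1519907400
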
Example 4
def _make_config_and_description(
        index: Index, task_desc_path: Path) -> Tuple[dict, TaskDescription]:
    task_desc = serialise.load_structure(task_desc_path, TaskDescription)

    app_config = task_desc.runtime_state.config_path

    config = paths.read_document(app_config)

    config['output_type'] = config['output_type']  # TODO: Temporary until ODC code is updated
    config['app_config_file'] = str(app_config)
    config = stacker.make_stacker_config(index, config)
    config['taskfile_version'] = make_tag(task_desc)
    config['version'] = digitalearthau.__version__ + ' ' + datacube.__version__

    return config, task_desc
Example 5
def run(index, dry_run: bool, task_desc_file: str, runner: TaskRunner, qsub):
    _LOG.info('Starting DEA Stacker processing...')

    task_desc = serialise.load_structure(Path(task_desc_file), TaskDescription)
    config, tasks = task_app.load_tasks(
        task_desc.runtime_state.task_serialisation_path)

    if dry_run:
        task_app.check_existing_files((task['filename'] for task in tasks))
        return

    task_func = partial(stacker.do_stack_task, config)
    process_func = partial(stacker.process_result, index)

    try:
        runner(task_desc, tasks, task_func, process_func)
        _LOG.info("Runner finished normally, triggering shutdown.")
    finally:
        runner.stop()
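
Both `run` functions share the same shutdown contract: the `try`/`finally` guarantees `runner.stop()` executes even if task execution raises, so any queued resources get released. A minimal sketch of that contract with a hypothetical runner:

class Runner:
    # Hypothetical stand-in for TaskRunner.
    def __call__(self, tasks):
        for task in tasks:
            print('running', task)

    def stop(self):
        # Reached on success and on exception alike.
        print('shutting down')

runner = Runner()
try:
    runner(['task-1', 'task-2'])
finally:
    runner.stop()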