Example #1
    def _export(cfg, env_name, mode, until, skip_tests):
        """
        Build and upload the Docker image, then export the Argo Workflows YAML spec.
        """
        with Commander(workspace=env_name,
                       templates_path=('soopervisor', 'assets')) as e:

            tasks, args = commons.load_tasks(mode=mode)

            if not tasks:
                raise CommanderStop(f'Loaded DAG in {mode!r} mode has no '
                                    'tasks to submit. Try "--mode force" to '
                                    'submit all tasks regardless of status')

            pkg_name, target_image = docker.build(e,
                                                  cfg,
                                                  env_name,
                                                  until=until,
                                                  skip_tests=skip_tests)

            e.info('Generating Argo Workflows YAML spec')
            _make_argo_spec(tasks=tasks,
                            args=args,
                            env_name=env_name,
                            cfg=cfg,
                            pkg_name=pkg_name,
                            target_image=target_image)

            e.info('Submitting jobs to Argo Workflows')
            e.success('Done. Submitted to Argo Workflows')
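Every exporter in these examples starts from the same commons.load_tasks(mode=...) call and bails out with CommanderStop when the selection is empty. A minimal caller-side sketch, assuming commons is soopervisor.commons (the module the snippets refer to) and using the 'force' mode that the error message above suggests:

    from soopervisor import commons  # assumption: the module the snippets refer to as commons

    # Select every task regardless of status; an unknown mode raises
    # ValueError (see Example #5 below)
    tasks, args = commons.load_tasks(mode='force')
    print(tasks, args)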
Example #2
    def _export(cfg, env_name, mode, until, skip_tests):
        """
        Build and upload Docker image. Submit the DAG to AWS Batch.
        """
        with Commander(workspace=env_name,
                       templates_path=('soopervisor', 'assets')) as e:
            tasks, args = commons.load_tasks(mode=mode)

            if not tasks:
                raise CommanderStop(f'Loaded DAG in {mode!r} mode has no '
                                    'tasks to submit. Try "--mode force" to '
                                    'submit all tasks regardless of status')

            pkg_name, remote_name = docker.build(e,
                                                 cfg,
                                                 env_name,
                                                 until=until,
                                                 skip_tests=skip_tests)

            e.info('Submitting jobs to AWS Batch')

            submit_dag(tasks=tasks,
                       args=args,
                       job_def=pkg_name,
                       remote_name=remote_name,
                       job_queue=cfg.job_queue,
                       container_properties=cfg.container_properties,
                       region_name=cfg.region_name,
                       cmdr=e)

            e.success('Done. Submitted to AWS Batch')
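The AWS Batch variant reads three extra values from cfg: job_queue, container_properties and region_name. A hypothetical stand-in object for tracing what submit_dag receives; the field names come from the snippet, the values are placeholders only:

    from types import SimpleNamespace

    # Placeholder config: real values come from the soopervisor target
    # configuration, not from this sketch
    cfg = SimpleNamespace(job_queue='my-job-queue',
                          region_name='us-east-1',
                          container_properties={'vcpus': 8, 'memory': 16384})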
Example #3
def test_load_tasks_missing_remote_metadata(tmp_fast_pipeline,
                                            add_current_to_sys_path, dag_build,
                                            mode, tasks_expected,
                                            args_expected):
    # remove one product's remote metadata to simulate a partially missing remote
    Path('remote', 'out', 'another').unlink()
    tasks, args = commons.load_tasks(mode=mode)
    assert tasks == tasks_expected
    assert args == args_expected
Example #4
    def _export(cfg, env_name, mode, until, skip_tests):
        """
        Copy the current source code to the target environment folder. The
        code, along with the DAG declaration file, can then be copied to
        AIRFLOW_HOME for execution.
        """
        with Commander(workspace=env_name,
                       templates_path=('soopervisor', 'assets')) as e:
            tasks, args = commons.load_tasks(mode=mode)

            if not tasks:
                raise CommanderStop(f'Loaded DAG in {mode!r} mode has no '
                                    'tasks to submit. Try "--mode force" to '
                                    'submit all tasks regardless of status')

            pkg_name, target_image = commons.docker.build(
                e, cfg, env_name, until=until, skip_tests=skip_tests)

            dag_dict = generate_airflow_spec(tasks, args, target_image)

            path_dag_dict_out = Path(pkg_name + '.json')
            path_dag_dict_out.write_text(json.dumps(dag_dict))
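Since this exporter writes the Airflow spec to pkg_name + '.json', the generated file can be inspected directly. A short sketch, with 'my_project.json' standing in for whatever name docker.build returned:

    import json
    from pathlib import Path

    # 'my_project.json' is a placeholder; _export writes pkg_name + '.json'
    dag_dict = json.loads(Path('my_project.json').read_text())
    print(list(dag_dict))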
Example #5
def test_invalid_mode():
    with pytest.raises(ValueError) as excinfo:
        commons.load_tasks(mode='unknown')

    assert 'mode must be one of' in str(excinfo.value)
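The same assertion can be written with the match argument of pytest.raises, which runs a regex search against str(excinfo.value). A sketch, assuming commons is importable from soopervisor as in the other examples:

    import pytest
    from soopervisor import commons  # assumption: the module the snippets refer to

    def test_invalid_mode_match():
        # equivalent to capturing excinfo and checking str(excinfo.value)
        with pytest.raises(ValueError, match='mode must be one of'):
            commons.load_tasks(mode='unknown')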
Example #6
def test_load_tasks(tmp_fast_pipeline, add_current_to_sys_path, dag_build,
                    mode, tasks_expected, args_expected):
    tasks, args = commons.load_tasks(mode=mode)
    assert tasks == tasks_expected
    assert args == args_expected
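The mode, tasks_expected and args_expected arguments in Examples #3 and #6 are normally injected by pytest parametrization. A hypothetical sketch of what that decorator could look like; the task names and CLI flags are illustrative, not taken from the real test suite:

    import pytest

    # Hypothetical parametrization; the decorated function is the test
    # shown in Example #6
    @pytest.mark.parametrize('mode, tasks_expected, args_expected', [
        ('force', {'load': [], 'clean': ['load']}, ['--force']),  # illustrative
        ('incremental', {'clean': ['load']}, []),                 # illustrative
    ])
    def test_load_tasks(tmp_fast_pipeline, add_current_to_sys_path, dag_build,
                        mode, tasks_expected, args_expected):
        ...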