Example #1
def test_execute_run_fail_pipeline():
    with get_bar_repo_handle() as repo_handle:
        pipeline_handle = PipelineHandle("fail", repo_handle)
        runner = CliRunner()

        with instance_for_test(
            overrides={
                "compute_logs": {
                    "module": "dagster.core.storage.noop_compute_log_manager",
                    "class": "NoOpComputeLogManager",
                }
            }
        ) as instance:
            # instance_for_test points DAGSTER_HOME at a temporary directory, so
            # DagsterInstance.get() here resolves the same ephemeral instance.
            instance = DagsterInstance.get()
            run = create_run_for_test(instance, pipeline_name="foo", run_id="new_run")

            input_json = serialize_dagster_namedtuple(
                ExecuteRunArgs(
                    pipeline_origin=pipeline_handle.get_python_origin(),
                    pipeline_run_id=run.run_id,
                    instance_ref=instance.get_ref(),
                ))

            result = runner_execute_run(
                runner,
                [input_json],
            )
            assert result.exit_code == 0

            assert "RUN_FAILURE" in result.stdout, "no match, result: {}".format(
                result)

            run = create_run_for_test(
                instance, pipeline_name="foo", run_id="new_run_raise_on_error"
            )

            input_json_raise_on_failure = serialize_dagster_namedtuple(
                ExecuteRunArgs(
                    pipeline_origin=pipeline_handle.get_python_origin(),
                    pipeline_run_id=run.run_id,
                    instance_ref=instance.get_ref(),
                    set_exit_code_on_failure=True,
                ))

            result = runner.invoke(api.execute_run_command,
                                   [input_json_raise_on_failure])

            assert result.exit_code != 0, str(result.stdout)

            assert "RUN_FAILURE" in result.stdout, "no match, result: {}".format(
                result)

            # Framework errors (e.g. attempting to execute a run that has already been executed) also result in a non-zero exit code
            result = runner.invoke(api.execute_run_command,
                                   [input_json_raise_on_failure])
            assert result.exit_code != 0, str(result.stdout)
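
A note on the `runner_execute_run` helper invoked above: it is not defined in this snippet. A minimal sketch of what it presumably does, inferred from how it is used here (the actual dagster test helper may differ):

def runner_execute_run(runner, cli_args):
    # Sketch (assumed): invoke the `dagster api execute_run` CLI command through
    # click's CliRunner and fail loudly on a non-zero exit code.
    result = runner.invoke(api.execute_run_command, cli_args)
    if result.exit_code != 0:
        raise Exception(
            "execute_run with args {args} returned exit code {code}: {out}".format(
                args=cli_args, code=result.exit_code, out=result.stdout
            )
        )
    return result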
Example #2
    def get_subset_external_pipeline_result(self, selector):
        check.inst_param(selector, "selector", PipelineSelector)
        check.invariant(
            selector.location_name == self.name,
            "PipelineSelector location_name mismatch, got {selector.location_name} expected {self.name}".format(
                self=self, selector=selector
            ),
        )

        external_repository = self.external_repositories[selector.repository_name]
        pipeline_handle = PipelineHandle(selector.pipeline_name, external_repository.handle)
        return sync_get_external_pipeline_subset_grpc(
            self._handle.client, pipeline_handle.get_external_origin(), selector.solid_selection
        )
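
For context, the `selector` argument is a `PipelineSelector`. A minimal sketch of building one to call this method (field names inferred from how they are read above; `repository_location` and the string values are placeholder assumptions, and `solid_selection` may be None to request the full pipeline):

selector = PipelineSelector(
    location_name=repository_location.name,  # must match this location's name, per the invariant above
    repository_name="my_repository",
    pipeline_name="foo_pipeline",
    solid_selection=None,
)
subset_result = repository_location.get_subset_external_pipeline_result(selector)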
Example #3
    def get_subset_external_pipeline_result(self, selector):
        from dagster.api.snapshot_pipeline import sync_get_external_pipeline_subset

        check.inst_param(selector, 'selector', PipelineSelector)
        check.invariant(
            selector.location_name == self.name,
            'PipelineSelector location_name mismatch, got {selector.location_name} expected {self.name}'.format(
                self=self, selector=selector
            ),
        )

        external_repository = self.external_repositories[selector.repository_name]
        pipeline_handle = PipelineHandle(selector.pipeline_name, external_repository.handle)
        return sync_get_external_pipeline_subset(
            pipeline_handle.get_origin(), selector.solid_selection
        )
Example #4
def test_queue_from_schedule_and_sensor(tmpdir, foo_example_repo):
    dagster_home_path = tmpdir.strpath
    with setup_instance(
            dagster_home_path,
            """run_coordinator:
    module: dagster.core.run_coordinator
    class: QueuedRunCoordinator
    config:
        dequeue_interval_seconds: 1
    """,
    ) as instance:
        external_schedule = foo_example_repo.get_external_schedule(
            "never_run_schedule")
        external_sensor = foo_example_repo.get_external_sensor(
            "never_on_sensor")

        foo_pipeline_handle = PipelineHandle("foo_pipeline",
                                             foo_example_repo.handle)

        instance.start_schedule_and_update_storage_state(external_schedule)
        instance.start_sensor(external_sensor)

        with start_daemon(timeout=180):
            run = create_run(instance, foo_pipeline_handle)
            with external_pipeline_from_run(run) as external_pipeline:
                instance.submit_run(run.run_id, external_pipeline)

                runs = [
                    poll_for_finished_run(instance, run.run_id),
                    poll_for_finished_run(
                        instance,
                        run_tags=PipelineRun.tags_for_sensor(external_sensor)),
                    poll_for_finished_run(
                        instance,
                        run_tags=PipelineRun.tags_for_schedule(
                            external_schedule),
                        timeout=90,
                    ),
                ]

                for run in runs:
                    logs = instance.all_logs(run.run_id)
                    assert_events_in_order(
                        logs,
                        [
                            "PIPELINE_ENQUEUED",
                            "PIPELINE_DEQUEUED",
                            "PIPELINE_STARTING",
                            "PIPELINE_START",
                            "PIPELINE_SUCCESS",
                        ],
                    )
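
The `assert_events_in_order` helper used above is likewise not defined in this snippet. A minimal sketch of the check it presumably performs (the real dagster test helper may differ):

def assert_events_in_order(logs, expected_events_in_order):
    # Sketch (assumed): collect the dagster event types in the order they were logged,
    # drop anything not in the expected list, and require an exact ordered match.
    logged_events = [
        record.dagster_event.event_type_value for record in logs if record.is_dagster_event
    ]
    filtered_events = [event for event in logged_events if event in expected_events_in_order]
    assert filtered_events == expected_events_in_order, "unexpected order: {}".format(logged_events)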
Example #5
def foo_pipeline_handle(foo_example_repo):  # pylint: disable=redefined-outer-name
    return PipelineHandle("foo_pipeline", foo_example_repo.handle)
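
This helper reads like a pytest fixture that depends on a `foo_example_repo` fixture (hence the redefined-outer-name pylint disable). A sketch of how it would presumably be declared and consumed, assuming pytest; the test name and assertion are illustrative only:

import pytest

@pytest.fixture
def foo_pipeline_handle(foo_example_repo):  # pylint: disable=redefined-outer-name
    return PipelineHandle("foo_pipeline", foo_example_repo.handle)

def test_foo_pipeline_handle(foo_pipeline_handle):  # pylint: disable=redefined-outer-name
    # A PipelineHandle pairs a pipeline name with the handle of the repository that owns it.
    assert foo_pipeline_handle.pipeline_name == "foo_pipeline"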
Example #6
def get_foo_pipeline_handle():
    with get_bar_repo_handle() as repo_handle:
        yield PipelineHandle("foo", repo_handle)
Example #7
def foo_pipeline_handle():
    with get_example_repo_handle() as repo_handle:
        yield PipelineHandle("foo_pipeline", repo_handle)
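
The helpers in Examples #6 and #7 are generators, so they are presumably wrapped with `contextlib.contextmanager` in the original test utilities (the decorator is not shown in these snippets). A sketch of that assumption, reusing the body of Example #6, and of the resulting usage:

from contextlib import contextmanager

@contextmanager  # assumed decorator; the body is copied from Example #6
def get_foo_pipeline_handle():
    with get_bar_repo_handle() as repo_handle:
        yield PipelineHandle("foo", repo_handle)

# Consumed the same way get_bar_repo_handle() is consumed in Example #1:
with get_foo_pipeline_handle() as pipeline_handle:
    origin = pipeline_handle.get_python_origin()  # e.g. to build ExecuteRunArgs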
Example #8
def legacy_get_foo_pipeline_handle():
    return PipelineHandle("foo", legacy_get_bar_repo_handle())
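
In contrast to the context-manager helpers in Examples #6 and #7, `legacy_get_bar_repo_handle()` is presumably a plain function that returns a repository handle directly, so the resulting PipelineHandle can be used immediately. A sketch of that assumed usage:

pipeline_handle = legacy_get_foo_pipeline_handle()
# get_external_origin() is used the same way in Example #2 to address the pipeline
# within its host repository.
external_origin = pipeline_handle.get_external_origin()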