Example #1
def test_instance_yaml(docs_snippets_folder):
    # Load the dagster.yaml shipped with the concurrency-limits docs snippet
    # and confirm it configures the QueuedRunCoordinator.
    instance_yaml_folder = os.path.join(
        docs_snippets_folder,
        "deploying",
        "concurrency_limits",
    )
    assert isinstance(
        InstanceRef.from_dir(instance_yaml_folder).run_coordinator,
        QueuedRunCoordinator,
    )
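The dagster.yaml validated above is not shown on this page. A minimal sketch of the kind of file it loads, assuming the snippet enables run queueing (the config block and max_concurrent_runs value are assumptions; only the module/class keys are required), exercised end to end:

import os
import tempfile

from dagster.core.instance import InstanceRef
from dagster.core.run_coordinator import QueuedRunCoordinator

# Hypothetical stand-in for the docs snippet's dagster.yaml.
RUN_COORDINATOR_YAML = """\
run_coordinator:
  module: dagster.core.run_coordinator
  class: QueuedRunCoordinator
  config:
    max_concurrent_runs: 4
"""

with tempfile.TemporaryDirectory() as tmpdir:
    with open(os.path.join(tmpdir, "dagster.yaml"), "w") as f:
        f.write(RUN_COORDINATOR_YAML)
    # from_dir reads <dir>/dagster.yaml by default and materializes the
    # configured run coordinator on the resulting ref.
    assert isinstance(
        InstanceRef.from_dir(tmpdir).run_coordinator, QueuedRunCoordinator
    )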
Example #2
def test_unique_value_instance_yaml(docs_snippets_folder):
    # Same docs folder, but load a non-default config file via config_filename.
    instance_yaml_folder = os.path.join(
        docs_snippets_folder,
        "deploying",
        "concurrency_limits",
    )
    assert isinstance(
        InstanceRef.from_dir(
            instance_yaml_folder,
            config_filename="per-unique-value-dagster.yaml",
        ).run_coordinator,
        QueuedRunCoordinator,
    )
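The only difference from Example #1 is the config_filename keyword, which overrides the file name from_dir looks for; omitting it is equivalent to passing "dagster.yaml" explicitly. A sketch (building from an empty temp dir, which from_dir tolerates, as Example #3 below also shows):

import tempfile

from dagster.core.instance import InstanceRef

# "dagster.yaml" is the default config_filename, so both refs here are
# built from the same file lookup.
with tempfile.TemporaryDirectory() as tmpdir:
    ref_default = InstanceRef.from_dir(tmpdir)
    ref_explicit = InstanceRef.from_dir(tmpdir, config_filename="dagster.yaml")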
Example #3
def test_nothing_new(mock_event_records):
    mock_event_records.side_effect = get_mock_event_records([("comments", 1),
                                                             ("stories", 2)])

    with tempfile.TemporaryDirectory() as tmpdir_path:
        context = SensorEvaluationContext(
            instance_ref=InstanceRef.from_dir(tmpdir_path),
            last_run_key="1|2",
            last_completion_time=None,
            cursor=None,
            repository_name=None,
        )
        requests = story_recommender_on_hn_table_update.evaluate_tick(
            context).run_requests
        assert len(requests) == 0
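The sensor under test is defined elsewhere in the hacker-news example. A minimal sketch, assuming the pipeline name and run-key derivation (both assumptions here), of why a context whose last_run_key already matches the latest storage ids yields zero run requests:

from dagster import RunRequest, sensor

@sensor(pipeline_name="story_recommender")  # pipeline name is an assumption
def hn_table_update_sensor(context):
    run_key = "1|2"  # in the real sensor, derived from the latest storage ids
    if context.last_run_key == run_key:
        return  # nothing new since the last tick -> no RunRequests
    yield RunRequest(run_key=run_key, run_config={})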
Example #4
def test_execution_plan_snapshot_backcompat():

    src_dir = file_relative_path(__file__, "test_execution_plan_snapshots/")
    snapshot_dirs = [
        f for f in os.listdir(src_dir)
        if not os.path.isfile(os.path.join(src_dir, f))
    ]
    for snapshot_dir_path in snapshot_dirs:
        print(f"Executing a saved run from {snapshot_dir_path}")  # pylint: disable=print-call

        with copy_directory(os.path.join(src_dir,
                                         snapshot_dir_path)) as test_dir:
            with DagsterInstance.from_ref(
                    InstanceRef.from_dir(test_dir)) as instance:
                runs = instance.get_runs()
                assert len(runs) == 1

                run = runs[0]
                assert run.status == PipelineRunStatus.NOT_STARTED

                the_pipeline = InMemoryPipeline(dynamic_pipeline)

                # First create a brand new plan from the pipeline and validate it
                new_plan = create_execution_plan(the_pipeline,
                                                 run_config=run.run_config)
                _validate_execution_plan(new_plan)

                # Create a snapshot and rebuild it, validate the rebuilt plan
                new_plan_snapshot = snapshot_from_execution_plan(
                    new_plan, run.pipeline_snapshot_id)
                rebuilt_plan = ExecutionPlan.rebuild_from_snapshot(
                    "dynamic_pipeline", new_plan_snapshot)
                _validate_execution_plan(rebuilt_plan)

                # Then validate the plan built from the historical snapshot on the run
                stored_snapshot = instance.get_execution_plan_snapshot(
                    run.execution_plan_snapshot_id)

                rebuilt_plan = ExecutionPlan.rebuild_from_snapshot(
                    "dynamic_pipeline", stored_snapshot)
                _validate_execution_plan(rebuilt_plan)

                # Finally, execute the run (using the historical execution plan snapshot)
                result = execute_run(the_pipeline,
                                     run,
                                     instance,
                                     raise_on_error=True)
                assert result.success
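copy_directory is a test utility not shown on this page. A plausible sketch, assuming it simply copies the checked-in snapshot directory into a throwaway location so the saved run can be loaded and executed without mutating the fixture:

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def copy_directory(src):
    # Copy the fixture into a fresh temp dir and hand back the copy's path;
    # the original snapshot directory is never written to.
    with tempfile.TemporaryDirectory() as tmpdir:
        dst = os.path.join(tmpdir, os.path.basename(src))
        shutil.copytree(src, dst)
        yield dst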
Example #5
def dagster_instance(helm_postgres_url):  # pylint: disable=redefined-outer-name

    with tempfile.TemporaryDirectory() as tempdir:
        with environ({"DAGSTER_HOME": tempdir}):

            with DagsterInstance(
                instance_type=InstanceType.PERSISTENT,
                local_artifact_storage=LocalArtifactStorage(tempdir),
                run_storage=PostgresRunStorage(helm_postgres_url),
                event_storage=PostgresEventLogStorage(helm_postgres_url),
                compute_log_manager=NoOpComputeLogManager(),
                run_coordinator=DefaultRunCoordinator(),
                run_launcher=ExplodingRunLauncher(),  # use graphql to launch any runs
                ref=InstanceRef.from_dir(tempdir),
            ) as instance:
                yield instance

                check_export_runs(instance)
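This one is a pytest fixture rather than a test. Hypothetical usage (the test body and assertion are assumptions, shown only to illustrate what the fixture yields):

def test_no_runs_yet(dagster_instance):  # hypothetical consumer of the fixture
    # The fixture yields a live DagsterInstance backed by the Helm Postgres;
    # ExplodingRunLauncher guarantees nothing was launched outside GraphQL.
    assert dagster_instance.get_runs() == []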
Example #6
def test_queued_runs(tmpdir, foo_pipeline_handle):
    dagster_home_path = tmpdir.strpath
    setup_instance(dagster_home_path)
    with start_daemon():
        instance_ref = InstanceRef.from_dir(dagster_home_path)
        with DagsterInstance.from_ref(instance_ref) as instance:

            run = create_run(instance, foo_pipeline_handle)
            with external_pipeline_from_run(run) as external_pipeline:
                instance.submit_run(run.run_id, external_pipeline)

            poll_for_finished_run(instance, run.run_id)

            logs = instance.all_logs(run.run_id)
            assert_events_in_order(
                logs,
                ["PIPELINE_ENQUEUED", "PIPELINE_DEQUEUED", "PIPELINE_SUCCESS"],
            )
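setup_instance is not shown on this page. A minimal sketch, assuming it writes a dagster.yaml enabling the QueuedRunCoordinator into the daemon's DAGSTER_HOME (the exact config keys are an assumption):

import os

def setup_instance(dagster_home_path):
    # The queued-run assertions above only hold if submissions go through the
    # QueuedRunCoordinator, which the daemon started by start_daemon dequeues.
    os.environ["DAGSTER_HOME"] = dagster_home_path
    with open(os.path.join(dagster_home_path, "dagster.yaml"), "w") as f:
        f.write(
            "run_coordinator:\n"
            "  module: dagster.core.run_coordinator\n"
            "  class: QueuedRunCoordinator\n"
        )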
Example #7
def dagster_instance_for_k8s_run_launcher(
    helm_postgres_url_for_k8s_run_launcher,
):  # pylint: disable=redefined-outer-name
    tempdir = DagsterInstance.temp_storage()

    instance_ref = InstanceRef.from_dir(tempdir)

    with DagsterInstance(
        instance_type=InstanceType.PERSISTENT,
        local_artifact_storage=LocalArtifactStorage(tempdir),
        run_storage=PostgresRunStorage(helm_postgres_url_for_k8s_run_launcher),
        event_storage=PostgresEventLogStorage(helm_postgres_url_for_k8s_run_launcher),
        schedule_storage=PostgresScheduleStorage(helm_postgres_url_for_k8s_run_launcher),
        compute_log_manager=NoOpComputeLogManager(),
        run_coordinator=DefaultRunCoordinator(),
        run_launcher=ExplodingRunLauncher(),
        ref=instance_ref,
    ) as instance:
        yield instance

        check_export_runs(instance)
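This fixture differs from Example #5 in two ways: it adds PostgresScheduleStorage for schedule state, and it builds the ref from DagsterInstance.temp_storage(), a process-lifetime temp directory, rather than a context-managed TemporaryDirectory that is deleted when the fixture exits.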