Example #1
def _verify_checkpoint_does_not_exist(context: DataContext, checkpoint: str,
                                      usage_event: str) -> None:
    if checkpoint in context.list_checkpoints():
        toolkit.exit_with_failure_message_and_stats(
            context,
            usage_event,
            f"A checkpoint named `{checkpoint}` already exists. Please choose a new name.",
        )
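# NOTE: a minimal sketch of the imports these test snippets presumably rely on; the
# exact module paths are assumptions inferred from the identifiers used below, not
# taken from the original test file:
#
#     import os
#     import shutil
#     from unittest import mock
#
#     from click.testing import CliRunner
#
#     from great_expectations import DataContext
#     from great_expectations.cli import cli, toolkit
#     from great_expectations.data_context.types.base import DataContextConfigDefaults
#
# The CLI test helpers (assert_no_logging_messages_or_tracebacks,
# LEGACY_CONFIG_DEFAULT_CHECKPOINT_STORE_MESSAGE,
# VALIDATION_OPERATORS_DEPRECATION_MESSAGE) presumably come from the project's own
# test utilities. Tests that take a ``mock_emit`` argument are presumably decorated
# with a mock.patch of the usage-statistics emit handler, which is why each test
# asserts on mock_emit.call_args_list.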
def test_checkpoint_new_specify_datasource_with_ge_config_v2(
    mock_emit,
    caplog,
    titanic_data_context_stats_enabled_config_version_2,
    titanic_expectation_suite,
):
    context = titanic_data_context_stats_enabled_config_version_2
    root_dir = context.root_directory
    assert context.list_checkpoints() == []
    context.save_expectation_suite(titanic_expectation_suite)
    assert context.list_expectation_suite_names() == ["Titanic.warning"]
    mock_emit.reset_mock()

    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        f"checkpoint new passengers Titanic.warning -d {root_dir} --datasource mydatasource",
        input="1\n1\n",
        catch_exceptions=False,
    )
    stdout = result.stdout
    assert result.exit_code == 0
    assert "A checkpoint named `passengers` was added to your project" in stdout

    assert mock_emit.call_count == 2
    assert mock_emit.call_args_list == [
        mock.call({
            "event_payload": {},
            "event": "data_context.__init__",
            "success": True
        }),
        mock.call({
            "event": "cli.checkpoint.new",
            "event_payload": {
                "api_version": "v2"
            },
            "success": True,
        }),
    ]
    expected_checkpoint = os.path.join(
        root_dir,
        DataContextConfigDefaults.CHECKPOINTS_BASE_DIRECTORY.value,
        "passengers.yml",
    )
    assert os.path.isfile(expected_checkpoint)

    # Create a new context for additional assertions
    context = DataContext(root_dir)
    assert context.list_checkpoints() == ["passengers"]

    assert_no_logging_messages_or_tracebacks(
        my_caplog=caplog,
        click_result=result,
        allowed_deprecation_message=LEGACY_CONFIG_DEFAULT_CHECKPOINT_STORE_MESSAGE,
    )
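# NOTE: in the config-version-2 test above, the cli.checkpoint.new usage event carries
# an {"api_version": "v2"} payload and the legacy checkpoint-store deprecation message
# is allowed, whereas the tests using the plain titanic_data_context_stats_enabled
# fixture below emit an empty event payload.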
def test_checkpoint_new_works_if_checkpoints_directory_is_missing(
        mock_emit, caplog, titanic_data_context_stats_enabled,
        titanic_expectation_suite):
    context = titanic_data_context_stats_enabled
    root_dir = context.root_directory
    checkpoints_dir = os.path.join(
        root_dir, DataContextConfigDefaults.CHECKPOINTS_BASE_DIRECTORY.value)
    shutil.rmtree(checkpoints_dir)
    assert not os.path.isdir(checkpoints_dir)
    assert context.list_checkpoints() == []

    context.save_expectation_suite(titanic_expectation_suite)
    assert context.list_expectation_suite_names() == ["Titanic.warning"]
    mock_emit.reset_mock()

    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        f"checkpoint new passengers Titanic.warning -d {root_dir}",
        input="1\n1\n",
        catch_exceptions=False,
    )
    stdout = result.stdout
    assert result.exit_code == 0
    assert "A checkpoint named `passengers` was added to your project" in stdout

    assert mock_emit.call_count == 2
    assert mock_emit.call_args_list == [
        mock.call({
            "event_payload": {},
            "event": "data_context.__init__",
            "success": True
        }),
        mock.call({
            "event": "cli.checkpoint.new",
            "event_payload": {},
            "success": True
        }),
    ]
    expected_checkpoint = os.path.join(
        root_dir,
        DataContextConfigDefaults.CHECKPOINTS_BASE_DIRECTORY.value,
        "passengers.yml",
    )
    assert os.path.isfile(expected_checkpoint)

    # Create a new context for additional assertions
    context = DataContext(root_dir)
    assert context.list_checkpoints() == ["passengers"]

    assert_no_logging_messages_or_tracebacks(
        my_caplog=caplog,
        click_result=result,
        allowed_deprecation_message=VALIDATION_OPERATORS_DEPRECATION_MESSAGE,
    )
Example #4
def _verify_checkpoint_does_not_exist(context: DataContext, checkpoint: str,
                                      usage_event: str) -> None:
    try:
        if checkpoint in context.list_checkpoints():
            toolkit.exit_with_failure_message_and_stats(
                context,
                usage_event,
                f"A checkpoint named `{checkpoint}` already exists. Please choose a new name.",
            )
    except InvalidTopLevelConfigKeyError as e:
        toolkit.exit_with_failure_message_and_stats(context, usage_event,
                                                    f"<red>{e}</red>")
def test_checkpoint_new_specify_datasource(mock_emit, caplog,
                                           titanic_data_context_stats_enabled,
                                           titanic_expectation_suite):
    context = titanic_data_context_stats_enabled
    root_dir = context.root_directory
    assert context.list_checkpoints() == []
    context.save_expectation_suite(titanic_expectation_suite)
    assert context.list_expectation_suite_names() == ["Titanic.warning"]
    mock_emit.reset_mock()

    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        f"checkpoint new passengers Titanic.warning -d {root_dir} --datasource mydatasource",
        input="1\n1\n",
        catch_exceptions=False,
    )
    stdout = result.stdout
    assert result.exit_code == 0
    assert "A checkpoint named `passengers` was added to your project" in stdout

    assert mock_emit.call_count == 2
    assert mock_emit.call_args_list == [
        mock.call({
            "event_payload": {},
            "event": "data_context.__init__",
            "success": True
        }),
        mock.call({
            "event": "cli.checkpoint.new",
            "event_payload": {},
            "success": True
        }),
    ]
    expected_checkpoint = os.path.join(root_dir, context.CHECKPOINTS_DIR,
                                       "passengers.yml")
    assert os.path.isfile(expected_checkpoint)

    # Create a new context for additional assertions
    context = DataContext(root_dir)
    assert context.list_checkpoints() == ["passengers"]

    assert_no_logging_messages_or_tracebacks(caplog, result)
def test_checkpoint_new_happy_path_generates_checkpoint_yml_with_comments_with_ge_config_v2(
    mock_emit,
    caplog,
    titanic_data_context_stats_enabled_config_version_2,
    titanic_expectation_suite,
):
    context = titanic_data_context_stats_enabled_config_version_2
    root_dir = context.root_directory
    assert context.list_checkpoints() == []
    context.save_expectation_suite(titanic_expectation_suite)
    assert context.list_expectation_suite_names() == ["Titanic.warning"]
    mock_emit.reset_mock()

    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        f"checkpoint new passengers Titanic.warning -d {root_dir}",
        input="1\n1\n",
        catch_exceptions=False,
    )
    stdout = result.stdout
    assert result.exit_code == 0
    assert "A checkpoint named `passengers` was added to your project" in stdout

    assert mock_emit.call_count == 2
    assert mock_emit.call_args_list == [
        mock.call({
            "event_payload": {},
            "event": "data_context.__init__",
            "success": True
        }),
        mock.call({
            "event": "cli.checkpoint.new",
            "event_payload": {
                "api_version": "v2"
            },
            "success": True,
        }),
    ]
    expected_checkpoint = os.path.join(
        root_dir,
        DataContextConfigDefaults.CHECKPOINTS_BASE_DIRECTORY.value,
        "passengers.yml",
    )
    assert os.path.isfile(expected_checkpoint)

    # Create a new context for additional assertions
    context = DataContext(root_dir)
    assert context.list_checkpoints() == ["passengers"]

    with open(expected_checkpoint) as f:
        obs_file = f.read()

        # This is snapshot-ish to prove that comments remain in place
        # TODO: <Alex>ALEX</Alex>
        #     assert (
        #         """\
        # # This checkpoint was created by the command `great_expectations checkpoint new`.
        # #
        # # A checkpoint is a list of one or more batches paired with one or more
        # # Expectation Suites and a configurable Validation Operator.
        # #
        # # It can be run with the `great_expectations checkpoint run` command.
        # # You can edit this file to add batches of data and expectation suites.
        # #
        # # For more details please see
        # # https://docs.greatexpectations.io/en/latest/guides/how_to_guides/validation/how_to_add_validations_data_or_suites_to_a_checkpoint.html
        # validation_operator_name: action_list_operator
        # # Batches are a list of batch_kwargs paired with a list of one or more suite
        # # names. A checkpoint can have one or more batches. This makes deploying
        # # Great Expectations in your pipelines easy!
        # batches:
        #   - batch_kwargs:"""
        #         in obs_file
        #     )
        assert ("""\
batches:
  - batch_kwargs:""" in obs_file)

    assert "/data/Titanic.csv" in obs_file

    assert ("""datasource: mydatasource
      data_asset_name: Titanic
    expectation_suite_names:
      - Titanic.warning
""" in obs_file)

    assert_no_logging_messages_or_tracebacks(
        my_caplog=caplog,
        click_result=result,
        allowed_deprecation_message=LEGACY_CONFIG_DEFAULT_CHECKPOINT_STORE_MESSAGE,
    )
def test_checkpoint_new_happy_path_generates_checkpoint_yml_with_comments(
        mock_emit, caplog, titanic_data_context_stats_enabled,
        titanic_expectation_suite):
    context = titanic_data_context_stats_enabled
    root_dir = context.root_directory
    assert context.list_checkpoints() == []
    context.save_expectation_suite(titanic_expectation_suite)
    assert context.list_expectation_suite_names() == ["Titanic.warning"]
    mock_emit.reset_mock()

    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        f"checkpoint new passengers Titanic.warning -d {root_dir}",
        input="1\n1\n",
        catch_exceptions=False,
    )
    stdout = result.stdout
    assert result.exit_code == 0
    assert "A checkpoint named `passengers` was added to your project" in stdout

    assert mock_emit.call_count == 2
    assert mock_emit.call_args_list == [
        mock.call({
            "event_payload": {},
            "event": "data_context.__init__",
            "success": True
        }),
        mock.call({
            "event": "cli.checkpoint.new",
            "event_payload": {},
            "success": True
        }),
    ]
    expected_checkpoint = os.path.join(root_dir, context.CHECKPOINTS_DIR,
                                       "passengers.yml")
    assert os.path.isfile(expected_checkpoint)

    # Create a new context for additional assertions
    context = DataContext(root_dir)
    assert context.list_checkpoints() == ["passengers"]

    with open(expected_checkpoint, "r") as f:
        obs_file = f.read()

    # This is snapshot-ish to prove that comments remain in place
    assert ("""\
# This checkpoint was created by the command `great_expectations checkpoint new`.
#
# A checkpoint is a list of one or more batches paired with one or more
# Expectation Suites and a configurable Validation Operator.
#
# It can be run with the `great_expectations checkpoint run` command.
# You can edit this file to add batches of data and expectation suites.
#
# For more details please see
# https://docs.greatexpectations.io/en/latest/command_line.html#great-expectations-checkpoint-new-checkpoint-suite
validation_operator_name: action_list_operator
# Batches are a list of batch_kwargs paired with a list of one or more suite
# names. A checkpoint can have one or more batches. This makes deploying
# Great Expectations in your pipelines easy!
batches:
  - batch_kwargs:""" in obs_file)

    assert "/data/Titanic.csv" in obs_file

    assert ("""datasource: mydatasource
    expectation_suite_names: # one or more suites may validate against a single batch
      - Titanic.warning
""" in obs_file)

    assert_no_logging_messages_or_tracebacks(caplog, result)