def load_data_context_with_error_handling(
    directory: str, from_cli_upgrade_command: bool = False
) -> DataContext:
    """Return a DataContext with good error handling and exit codes.

    Args:
        directory: project root directory to load the context from.
        from_cli_upgrade_command: whether the caller is the explicit
            ``project upgrade`` CLI command (forwarded to the upgrade flow).

    Exits with status 1 (after printing a red message) on any known
    configuration or plugin error that cannot be auto-upgraded.
    """
    # TODO consolidate all the myriad CLI tests into this
    try:
        return DataContext(directory)
    except ge_exceptions.UnsupportedConfigVersionError as err:
        # Config version is unsupported: offer the upgrade helper when one
        # is registered for the detected version, otherwise surface the error.
        directory = directory or DataContext.find_context_root_dir()
        ge_config_version = DataContext.get_ge_config_version(
            context_root_dir=directory
        )
        if ge_config_version:
            upgrade_helper_class = GE_UPGRADE_HELPER_VERSION_MAP.get(
                int(ge_config_version)
            )
        else:
            upgrade_helper_class = None
        if (
            upgrade_helper_class
            and ge_config_version < MINIMUM_SUPPORTED_CONFIG_VERSION
        ):
            upgrade_project(
                context_root_dir=directory,
                ge_config_version=ge_config_version,
                from_cli_upgrade_command=from_cli_upgrade_command,
            )
        else:
            cli_message("<red>{}</red>".format(err.message))
            sys.exit(1)
    except (
        ge_exceptions.ConfigNotFoundError,
        ge_exceptions.InvalidConfigError,
    ) as err:
        cli_message("<red>{}</red>".format(err.message))
        sys.exit(1)
    except (
        ge_exceptions.PluginModuleNotFoundError,
        ge_exceptions.PluginClassNotFoundError,
    ) as err:
        # Both plugin errors carry a pre-colored CLI message.
        cli_message(err.cli_colored_message)
        sys.exit(1)
def load_data_context_with_error_handling(
    directory: str, from_cli_upgrade_command: bool = False
) -> DataContext:
    """Return a DataContext with good error handling and exit codes.

    When invoked from the ``project upgrade`` CLI command and the loaded
    config version is behind CURRENT_GE_CONFIG_VERSION, attempts a single
    one-version upgrade increment and reloads the context on success.

    Exits with status 1 (after printing a red message) on any known
    configuration or plugin error that cannot be auto-upgraded.
    """
    try:
        context: DataContext = DataContext(context_root_dir=directory)
        ge_config_version: int = context.get_config().config_version
        needs_increment = (
            from_cli_upgrade_command
            and int(ge_config_version) < CURRENT_GE_CONFIG_VERSION
        )
        if needs_increment:
            directory = directory or context.root_directory
            (
                version_bumped,
                upgrade_raised,
            ) = upgrade_project_one_version_increment(
                context_root_dir=directory,
                ge_config_version=ge_config_version,
                continuation_message=EXIT_UPGRADE_CONTINUATION_MESSAGE,
                from_cli_upgrade_command=from_cli_upgrade_command,
            )
            if version_bumped and not upgrade_raised:
                # Reload so the returned context reflects the upgraded config.
                context = DataContext(context_root_dir=directory)
        return context
    except ge_exceptions.UnsupportedConfigVersionError as err:
        # Config version is unsupported: offer the upgrade helper when one
        # is registered for the detected version, otherwise surface the error.
        directory = directory or DataContext.find_context_root_dir()
        ge_config_version = DataContext.get_ge_config_version(
            context_root_dir=directory
        )
        upgrade_helper_class = (
            GE_UPGRADE_HELPER_VERSION_MAP.get(int(ge_config_version))
            if ge_config_version
            else None
        )
        if upgrade_helper_class and ge_config_version < CURRENT_GE_CONFIG_VERSION:
            upgrade_project(
                context_root_dir=directory,
                ge_config_version=ge_config_version,
                from_cli_upgrade_command=from_cli_upgrade_command,
            )
        else:
            cli_message("<red>{}</red>".format(err.message))
            sys.exit(1)
    except (
        ge_exceptions.ConfigNotFoundError,
        ge_exceptions.InvalidConfigError,
    ) as err:
        cli_message("<red>{}</red>".format(err.message))
        sys.exit(1)
    except (
        ge_exceptions.PluginModuleNotFoundError,
        ge_exceptions.PluginClassNotFoundError,
    ) as err:
        # Both plugin errors carry a pre-colored CLI message.
        cli_message(err.cli_colored_message)
        sys.exit(1)
    except ge_exceptions.InvalidConfigurationYamlError as err:
        cli_message(f"<red>{str(err)}</red>")
        sys.exit(1)
def test_upgrade_helper_intervention_on_cli_command(v10_project_directory, caplog):
    """Declining the CLI upgrade prompt must leave the project untouched.

    Runs ``suite list`` against a v1.0-config project, answers "n" to the
    Upgrade Helper prompt, then checks the prompt text, the exit messaging,
    the unchanged config version, and the unchanged directory tree.
    """
    # test if cli detects out of date project and asks to run upgrade helper
    # decline upgrade and ensure config version was not modified
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        ["suite", "list", "-d", v10_project_directory],
        input="n\n",  # decline the upgrade prompt
        catch_exceptions=False,
    )
    stdout = result.stdout
    assert (
        "Your project appears to have an out-of-date config version (1.0) - the version number must be at least 2."
        in stdout
    )
    assert "In order to proceed, your project must be upgraded." in stdout
    assert (
        "Would you like to run the Upgrade Helper to bring your project up-to-date? [Y/n]:"
        in stdout
    )
    # NOTE(review): the "[36m"/"[0m" fragments look like ANSI color codes with
    # the ESC character absent from this literal — confirm against the real output.
    assert (
        "Ok, exiting now. To upgrade at a later time, use the following command: [36mgreat_expectations project "
        "upgrade[0m" in stdout
    )
    assert (
        "To learn more about the upgrade process, visit ["
        "36mhttps://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html"
        in stdout
    )
    assert_no_logging_messages_or_tracebacks(caplog, result)

    # make sure config version unchanged
    assert (
        DataContext.get_ge_config_version(context_root_dir=v10_project_directory) == 1
    )

    # Full project tree must be byte-identical to the pre-invocation fixture.
    expected_project_tree_str = """\
great_expectations/
    .gitignore
    great_expectations.yml
    checkpoints/
        .gitkeep
    expectations/
        .gitkeep
    notebooks/
        .gitkeep
    plugins/
        custom_store_backends/
            __init__.py
            my_custom_store_backend.py
    uncommitted/
        config_variables.yml
        data_docs/
            local_site/
                expectations/
                    .gitkeep
                static/
                    .gitkeep
                validations/
                    diabetic_data/
                        warning/
                            20200430T191246.763896Z/
                                c3b4c5df224fef4b1a056a0f3b93aba5.html
        validations/
            diabetic_data/
                warning/
                    20200430T191246.763896Z/
                        c3b4c5df224fef4b1a056a0f3b93aba5.json
"""
    obs_project_tree_str = gen_directory_tree_str(v10_project_directory)
    assert obs_project_tree_str == expected_project_tree_str
def test_project_upgrade_with_exception(v10_project_directory, caplog):
    """An upgrade that raises mid-run must not bump the config version.

    Installs a config fixture known to trigger an exception during upgrade,
    runs ``project upgrade``, then compares stdout, the resulting directory
    tree, the (unchanged) config version, and the persisted upgrade log
    against fixtures.
    """
    # test project upgrade that requires manual steps

    # copy v2 yml
    shutil.copy(
        file_relative_path(
            __file__,
            "../../test_fixtures/upgrade_helper/great_expectations_v1_basic_with_exception"
            ".yml",
        ),
        os.path.join(v10_project_directory, "great_expectations.yml"),
    )

    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        ["project", "upgrade", "-d", v10_project_directory],
        input="\n",  # accept the default at the prompt
        catch_exceptions=False,
    )
    stdout = result.stdout

    # Compare the full CLI transcript against the recorded fixture, with the
    # project-dir placeholder substituted.
    with open(
        file_relative_path(
            __file__,
            "../../test_fixtures/upgrade_helper/test_project_upgrade_with_exception_expected_stdout.fixture",
        )
    ) as f:
        expected_stdout = f.read()
    expected_stdout = expected_stdout.replace("GE_PROJECT_DIR", v10_project_directory)
    assert stdout == expected_stdout

    expected_project_tree_str = """\
great_expectations/
    .gitignore
    great_expectations.yml
    checkpoints/
        .gitkeep
    expectations/
        .gitkeep
    notebooks/
        .gitkeep
    plugins/
        custom_store_backends/
            __init__.py
            my_custom_store_backend.py
    uncommitted/
        config_variables.yml
        data_docs/
            local_site/
                expectations/
                    .gitkeep
                static/
                    .gitkeep
                validations/
                    diabetic_data/
                        warning/
                            20200430T191246.763896Z/
                                20200430T191246.763896Z/
                                    c3b4c5df224fef4b1a056a0f3b93aba5.html
        logs/
            project_upgrades/
                UpgradeHelperV11_20190926T134241.000000Z.json
        validations/
            diabetic_data/
                warning/
                    20200430T191246.763896Z/
                        20200430T191246.763896Z/
                            c3b4c5df224fef4b1a056a0f3b93aba5.json
"""
    obs_project_tree_str = gen_directory_tree_str(v10_project_directory)
    assert obs_project_tree_str == expected_project_tree_str
    # make sure config number not incremented
    assert (
        DataContext.get_ge_config_version(context_root_dir=v10_project_directory) == 1
    )

    # Compare the persisted upgrade log against the fixture, substituting the
    # project-dir and GE install-path placeholders.
    with open(
        file_relative_path(
            __file__,
            "../../test_fixtures/upgrade_helper/UpgradeHelperV11_basic_upgrade_with_exception_log"
            ".json",
        )
    ) as f:
        expected_upgrade_log_dict = json.load(f)
    expected_upgrade_log_str = json.dumps(expected_upgrade_log_dict)
    expected_upgrade_log_str = expected_upgrade_log_str.replace(
        "GE_PROJECT_DIR", v10_project_directory
    )
    expected_upgrade_log_str = expected_upgrade_log_str.replace(
        "GE_PATH", os.path.split(great_expectations.__file__)[0]
    )
    expected_upgrade_log_dict = json.loads(expected_upgrade_log_str)

    with open(
        f"{v10_project_directory}/uncommitted/logs/project_upgrades/UpgradeHelperV11_20190926T134241.000000Z"
        f".json"
    ) as f:
        obs_upgrade_log_dict = json.load(f)
    # The exception message is environment-dependent; blank it before comparing.
    obs_upgrade_log_dict["exceptions"][0]["exception_message"] = ""

    assert obs_upgrade_log_dict == expected_upgrade_log_dict
def test_v2_to_v3_project_upgrade(v20_project_directory, caplog):
    """A v2 project upgrades cleanly to config version 3.

    Runs ``project upgrade`` on a v2 project, then compares stdout, the
    resulting directory tree, the bumped config version, and the persisted
    UpgradeHelperV13 log against fixtures.
    """
    # test project upgrade that requires no manual steps
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        ["project", "upgrade", "-d", v20_project_directory],
        input="\n",  # accept the default at the prompt
        catch_exceptions=False,
    )
    stdout = result.stdout

    # Compare the full CLI transcript against the recorded fixture, with the
    # project-dir placeholder substituted.
    with open(
        file_relative_path(
            __file__,
            "../../../test_fixtures/upgrade_helper/test_v2_to_v3_project_upgrade_expected_v012_stdout.fixture",
        )
    ) as f:
        expected_stdout = f.read()
    expected_stdout = expected_stdout.replace("GE_PROJECT_DIR", v20_project_directory)
    assert stdout == expected_stdout

    expected_project_tree_str = """\
great_expectations/
    .gitignore
    great_expectations.yml
    checkpoints/
        .gitkeep
        my_checkpoint.yml
        titanic_checkpoint_0.yml
        titanic_checkpoint_1.yml
        titanic_checkpoint_2.yml
    expectations/
        .ge_store_backend_id
        .gitkeep
    notebooks/
        .gitkeep
        pandas/
            validation_playground.ipynb
        spark/
            validation_playground.ipynb
        sql/
            validation_playground.ipynb
    plugins/
        custom_data_docs/
            styles/
                data_docs_custom_styles.css
    uncommitted/
        config_variables.yml
        data_docs/
            local_site/
                expectations/
                    .gitkeep
                static/
                    .gitkeep
                validations/
                    diabetic_data/
                        warning/
                            20200430T191246.763896Z/
                                c3b4c5df224fef4b1a056a0f3b93aba5.html
        logs/
            project_upgrades/
                UpgradeHelperV13_20210119T132639.000000Z.json
        validations/
            .ge_store_backend_id
            diabetic_data/
                warning/
                    20200430T191246.763896Z/
                        c3b4c5df224fef4b1a056a0f3b93aba5.json
"""
    obs_project_tree_str = gen_directory_tree_str(v20_project_directory)
    assert obs_project_tree_str == expected_project_tree_str
    # make sure config number incremented
    assert (
        DataContext.get_ge_config_version(context_root_dir=v20_project_directory) == 3
    )

    # Compare the persisted upgrade log against the fixture, substituting the
    # project-dir placeholder.
    with open(
        file_relative_path(
            __file__,
            "../../../test_fixtures/upgrade_helper/UpgradeHelperV13_basic_upgrade_log.json",
        )
    ) as f:
        expected_upgrade_log_dict = json.load(f)
    expected_upgrade_log_str = json.dumps(expected_upgrade_log_dict)
    expected_upgrade_log_str = expected_upgrade_log_str.replace(
        "GE_PROJECT_DIR", v20_project_directory
    )
    expected_upgrade_log_dict = json.loads(expected_upgrade_log_str)

    with open(
        f"{v20_project_directory}/uncommitted/logs/project_upgrades/UpgradeHelperV13_20210119T132639.000000Z.json"
    ) as f:
        obs_upgrade_log_dict = json.load(f)

    assert obs_upgrade_log_dict == expected_upgrade_log_dict
def test_project_upgrade_with_manual_steps(
    v10_project_directory, caplog, sa, postgresql_engine
):
    """An upgrade needing manual follow-up must not bump the config version.

    Installs a config fixture with database backends that require manual
    migration, runs ``project upgrade``, then compares stdout, the directory
    tree, the (unchanged) config version, and the persisted upgrade log
    against fixtures.
    """
    # This test requires sqlalchemy because it includes database backends configured
    # test project upgrade that requires manual steps

    # copy v2 yml
    shutil.copy(
        file_relative_path(
            __file__,
            "../../../test_fixtures/upgrade_helper/great_expectations_v1_needs_manual_upgrade.yml",
        ),
        os.path.join(v10_project_directory, "great_expectations.yml"),
    )

    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli,
        ["project", "upgrade", "-d", v10_project_directory],
        input="\n",  # accept the default at the prompt
        catch_exceptions=False,
    )
    stdout = result.stdout

    # Compare the full CLI transcript against the recorded fixture, with the
    # project-dir placeholder substituted.
    with open(
        file_relative_path(
            __file__,
            "../../../test_fixtures/upgrade_helper/test_project_upgrade_with_manual_steps_expected_v012_stdout.fixture",
        )
    ) as f:
        expected_stdout = f.read()
    expected_stdout = expected_stdout.replace("GE_PROJECT_DIR", v10_project_directory)
    assert stdout == expected_stdout

    # Importing the custom store backend plugin may create a __pycache__ dir;
    # remove it so the tree comparison below is deterministic.
    pycache_dir_path = os.path.join(
        v10_project_directory, "plugins", "custom_store_backends", "__pycache__"
    )
    try:
        shutil.rmtree(pycache_dir_path)
    except FileNotFoundError:
        pass

    expected_project_tree_str = """\
great_expectations/
    .gitignore
    great_expectations.yml
    checkpoints/
        .gitkeep
    expectations/
        .ge_store_backend_id
        .gitkeep
    notebooks/
        .gitkeep
    plugins/
        custom_store_backends/
            __init__.py
            my_custom_store_backend.py
    uncommitted/
        config_variables.yml
        data_docs/
            local_site/
                expectations/
                    .gitkeep
                static/
                    .gitkeep
                validations/
                    diabetic_data/
                        warning/
                            20200430T191246.763896Z/
                                20200430T191246.763896Z/
                                    c3b4c5df224fef4b1a056a0f3b93aba5.html
        logs/
            project_upgrades/
                UpgradeHelperV11_20190926T134241.000000Z.json
        validations/
            .ge_store_backend_id
            diabetic_data/
                warning/
                    20200430T191246.763896Z/
                        20200430T191246.763896Z/
                            c3b4c5df224fef4b1a056a0f3b93aba5.json
"""
    obs_project_tree_str = gen_directory_tree_str(v10_project_directory)
    assert obs_project_tree_str == expected_project_tree_str
    # make sure config number not incremented
    assert (
        DataContext.get_ge_config_version(context_root_dir=v10_project_directory) == 1
    )

    # Compare the persisted upgrade log against the fixture, substituting the
    # project-dir placeholder.
    with open(
        file_relative_path(
            __file__,
            "../../../test_fixtures/upgrade_helper/UpgradeHelperV11_manual_steps_upgrade_log.json",
        )
    ) as f:
        expected_upgrade_log_dict = json.load(f)
    expected_upgrade_log_str = json.dumps(expected_upgrade_log_dict)
    expected_upgrade_log_str = expected_upgrade_log_str.replace(
        "GE_PROJECT_DIR", v10_project_directory
    )
    expected_upgrade_log_dict = json.loads(expected_upgrade_log_str)

    with open(
        f"{v10_project_directory}/uncommitted/logs/project_upgrades/UpgradeHelperV11_20190926T134241.000000Z.json"
    ) as f:
        obs_upgrade_log_dict = json.load(f)

    assert obs_upgrade_log_dict == expected_upgrade_log_dict
def test_basic_project_upgrade(v10_project_directory, caplog):
    """A v1 project upgrades to config version 3.0 with no manual steps.

    Runs the v3-api ``project upgrade`` command, then compares stdout, the
    resulting directory tree (including both V11 and V13 upgrade logs), the
    bumped config version, and the persisted V11 upgrade log against fixtures.
    """
    # test project upgrade that requires no manual steps
    runner: CliRunner = CliRunner(mix_stderr=False)
    result: Result = runner.invoke(
        cli,
        ["-c", v10_project_directory, "--v3-api", "project", "upgrade"],
        input="\n",  # accept the default at the prompt
        catch_exceptions=False,
    )
    stdout: str = result.stdout

    # Compare the full CLI transcript against the recorded fixture, with the
    # project-dir placeholder substituted.
    with open(
        file_relative_path(
            __file__,
            "../../test_fixtures/upgrade_helper/test_basic_project_upgrade_expected_stdout.fixture",
        )
    ) as f:
        expected_stdout: str = f.read()
    expected_stdout = expected_stdout.replace("GE_PROJECT_DIR", v10_project_directory)
    assert stdout == expected_stdout

    expected_project_tree_str: str = """\
great_expectations/
    .gitignore
    great_expectations.yml
    checkpoints/
        .gitkeep
    expectations/
        .ge_store_backend_id
        .gitkeep
    notebooks/
        .gitkeep
    plugins/
        custom_store_backends/
            __init__.py
            my_custom_store_backend.py
    uncommitted/
        config_variables.yml
        data_docs/
            local_site/
                expectations/
                    .gitkeep
                static/
                    .gitkeep
                validations/
                    diabetic_data/
                        warning/
                            20200430T191246.763896Z/
                                20200430T191246.763896Z/
                                    c3b4c5df224fef4b1a056a0f3b93aba5.html
        logs/
            project_upgrades/
                UpgradeHelperV11_20190926T134241.000000Z.json
                UpgradeHelperV13_20190926T134241.000000Z.json
        validations/
            .ge_store_backend_id
            diabetic_data/
                warning/
                    20200430T191246.763896Z/
                        20200430T191246.763896Z/
                            c3b4c5df224fef4b1a056a0f3b93aba5.json
"""
    obs_project_tree_str: str = gen_directory_tree_str(
        startpath=v10_project_directory
    )
    assert obs_project_tree_str == expected_project_tree_str
    # make sure config number incremented
    assert (
        DataContext.get_ge_config_version(context_root_dir=v10_project_directory)
        == 3.0
    )

    # Compare the persisted V11 upgrade log against the fixture, substituting
    # the project-dir placeholder.
    with open(
        file_relative_path(
            __file__,
            "../../test_fixtures/upgrade_helper/UpgradeHelperV11_basic_upgrade_log.json",
        )
    ) as f:
        expected_upgrade_log_dict: dict = json.load(f)
    expected_upgrade_log_str: str = json.dumps(expected_upgrade_log_dict)
    expected_upgrade_log_str = expected_upgrade_log_str.replace(
        "GE_PROJECT_DIR", v10_project_directory
    )
    expected_upgrade_log_dict: dict = json.loads(expected_upgrade_log_str)

    with open(
        f"{v10_project_directory}/uncommitted/logs/project_upgrades/UpgradeHelperV11_20190926T134241.000000Z.json"
    ) as f:
        obs_upgrade_log_dict: dict = json.load(f)

    assert obs_upgrade_log_dict == expected_upgrade_log_dict