def event_log_storage(self):  # pylint: disable=arguments-differ
    """Yield a ConsolidatedSqliteEventLogStorage rooted in a fresh temp dir.

    The storage is disposed on exit, before the directory is removed.
    """
    # Create the temp dir under the cwd: the default temp roots have
    # issues with filesystem-notification-based event log watching.
    with tempfile.TemporaryDirectory(dir=os.getcwd()) as storage_dir:
        event_log = ConsolidatedSqliteEventLogStorage(storage_dir)
        try:
            yield event_log
        finally:
            # Release sqlite connections/watchers before the dir vanishes.
            event_log.dispose()
def test_execute_display_command():
    """Smoke-test the list-versions CLI command before and after a memoized run."""
    with tempfile.TemporaryDirectory() as workspace:
        # Ephemeral persistent-type instance with every storage rooted in the
        # same temporary workspace directory.
        instance = DagsterInstance(
            instance_type=InstanceType.PERSISTENT,
            local_artifact_storage=LocalArtifactStorage(workspace),
            run_storage=SqliteRunStorage.from_local(workspace),
            event_storage=ConsolidatedSqliteEventLogStorage(workspace),
            compute_log_manager=LocalComputeLogManager(workspace),
            run_coordinator=DefaultRunCoordinator(),
            run_launcher=DefaultRunLauncher(),
        )

        run_config = {
            "solids": {
                "create_string_1_asset": {"config": {"input_str": "apple"}},
                "take_string_1_asset": {"config": {"input_str": "apple"}},
            },
            "resources": {"object_manager": {"config": {"base_dir": workspace}}},
        }

        # Write the run config to a temp file: the CLI command reads its
        # config from disk, and the intermediate storage dir is temporary.
        config_path = os.path.join(workspace, "pipeline_config.yaml")
        with open(config_path, "w") as config_file:
            config_file.write(yaml.dump(run_config))

        kwargs = {
            "config": (config_path,),
            "pipeline": "asset_pipeline",
            "python_file": file_relative_path(
                __file__, "../../core_tests/execution_tests/memoized_dev_loop_pipeline.py"
            ),
            "tags": '{"dagster/is_memoized_run": "true"}',
        }

        # The command should produce output even before any run exists.
        with Capturing() as output:
            execute_list_versions_command(kwargs=kwargs, instance=instance)
        assert output

        # Execute the pipeline once so that addresses have been populated.
        result = execute_pipeline(
            asset_pipeline,
            run_config=run_config,
            mode="only_mode",
            tags={"dagster/is_memoized_run": "true"},
            instance=instance,
        )
        assert result.success

        # ...and it should still produce output afterwards.
        with Capturing() as output:
            execute_list_versions_command(kwargs=kwargs, instance=instance)
        assert output
def get_ephemeral_instance(temp_dir):
    """Return a persistent-type DagsterInstance with all storages under ``temp_dir``."""
    return DagsterInstance(
        instance_type=InstanceType.PERSISTENT,
        local_artifact_storage=LocalArtifactStorage(temp_dir),
        run_storage=SqliteRunStorage.from_local(temp_dir),
        event_storage=ConsolidatedSqliteEventLogStorage(temp_dir),
        compute_log_manager=LocalComputeLogManager(temp_dir),
        run_launcher=DefaultRunLauncher(),
        run_coordinator=DefaultRunCoordinator(),
    )
def event_log_storage(self):  # pylint: disable=arguments-differ
    """Yield a ConsolidatedSqliteEventLogStorage rooted in a fresh temp dir.

    Fix: the storage was previously yielded without cleanup, so its sqlite
    connections/watchers outlived the fixture (and could hold the directory
    open). Dispose in a ``finally`` block, mirroring the sibling fixture
    that already does so.
    """
    with tempfile.TemporaryDirectory() as tmpdir_path:
        storage = ConsolidatedSqliteEventLogStorage(tmpdir_path)
        try:
            yield storage
        finally:
            # Release resources before TemporaryDirectory removes the dir.
            storage.dispose()
def create_consolidated_sqlite_run_event_log_storage():
    """Yield a ConsolidatedSqliteEventLogStorage backed by a temporary directory.

    Fix: dispose the storage before the temp dir is removed so its sqlite
    connections/watchers do not leak past the fixture — consistent with the
    other event-log-storage fixtures in this codebase that dispose in a
    ``finally`` block.
    """
    with seven.TemporaryDirectory() as tmpdir_path:
        storage = ConsolidatedSqliteEventLogStorage(tmpdir_path)
        try:
            yield storage
        finally:
            storage.dispose()
def create_consolidated_sqlite_event_log_instance():
    """Yield ``[instance, asset_storage]`` backed by a temporary directory."""
    with seven.TemporaryDirectory() as storage_dir:
        event_log = ConsolidatedSqliteEventLogStorage(storage_dir)
        # NOTE: yields a list (not a tuple) to match existing call sites.
        yield [get_instance(storage_dir, event_log), event_log]
def test_asset_key_structure():
    """Structured asset keys from a 0.9.16 snapshot stay readable and mergeable."""
    snapshot_dir = file_relative_path(__file__, "compat_tests/snapshot_0_9_16_asset_key_structure")
    with copy_directory(snapshot_dir) as test_dir:
        storage = ConsolidatedSqliteEventLogStorage(test_dir)
        assert len(storage.get_all_asset_keys()) == 5

        # A structured (multi-component) asset key written by the old schema.
        asset_key = AssetKey(["dashboards", "cost_dashboard"])

        # Check that backcompat events are read.
        assert storage.has_asset_key(asset_key)
        assert len(storage.get_asset_events(asset_key)) == 1
        assert len(storage.get_asset_run_ids(asset_key)) == 1

        # Check that backcompat events are merged with newly stored events.
        new_run_id = "fake_run_id"
        storage.store_event(_materialization_event_record(new_run_id, asset_key))
        assert storage.has_asset_key(asset_key)
        assert len(storage.get_asset_events(asset_key)) == 2
        assert len(storage.get_asset_run_ids(asset_key)) == 2
def test_dev_loop_changing_versions():
    """Memoized re-execution recomputes only steps whose config version changed.

    Runs ``basic_pipeline`` memoized, then mutates individual solids' config
    between runs and asserts that ``get_step_keys_to_execute`` reports exactly
    the invalidated steps (the changed solid plus its downstream dependents).
    NOTE: statement order matters — ``run_config`` is mutated in place between
    assertions, and memoization state accumulates in ``instance``.
    """
    with seven.TemporaryDirectory() as temp_dir:
        # Ephemeral persistent-type instance; all storages share temp_dir.
        run_store = SqliteRunStorage.from_local(temp_dir)
        event_store = ConsolidatedSqliteEventLogStorage(temp_dir)
        compute_log_manager = LocalComputeLogManager(temp_dir)
        instance = DagsterInstance(
            instance_type=InstanceType.PERSISTENT,
            local_artifact_storage=LocalArtifactStorage(temp_dir),
            run_storage=run_store,
            event_storage=event_store,
            compute_log_manager=compute_log_manager,
            run_launcher=CliApiRunLauncher(),
        )
        run_config = {
            "solids": {
                "create_string_1": {"config": {"input_str": "apple", "base_dir": temp_dir}},
                "create_string_2": {"config": {"input_str": "apple", "base_dir": temp_dir}},
                "take_string_1": {"config": {"input_str": "apple", "base_dir": temp_dir}},
                "take_string_2": {"config": {"input_str": "apple", "base_dir": temp_dir}},
                "take_string_two_inputs": {"config": {"input_str": "apple", "base_dir": temp_dir}},
            },
            "intermediate_storage": {"filesystem": {"config": {"base_dir": temp_dir}}},
        }

        # First memoized run: everything executes; afterwards nothing is stale.
        result = execute_pipeline(
            basic_pipeline,
            run_config=run_config,
            mode="only_mode",
            tags={"dagster/is_memoized_run": "true"},
            instance=instance,
        )
        assert result.success
        assert not get_step_keys_to_execute(instance, basic_pipeline, run_config, "only_mode")

        # Changing take_string_1's config invalidates it and its downstream
        # consumer take_string_two_inputs.
        run_config["solids"]["take_string_1"]["config"]["input_str"] = "banana"
        assert set(
            get_step_keys_to_execute(instance, basic_pipeline, run_config, "only_mode")
        ) == set(["take_string_1.compute", "take_string_two_inputs.compute"])

        # Re-running repopulates the memoization state; nothing stale remains.
        result2 = execute_pipeline(
            basic_pipeline,
            run_config=run_config,
            mode="only_mode",
            tags={"dagster/is_memoized_run": "true"},
            instance=instance,
        )
        assert result2.success
        assert not get_step_keys_to_execute(instance, basic_pipeline, run_config, "only_mode")

        # A leaf solid's config change invalidates only that step.
        run_config["solids"]["take_string_two_inputs"]["config"]["input_str"] = "banana"
        assert get_step_keys_to_execute(instance, basic_pipeline, run_config, "only_mode") == [
            "take_string_two_inputs.compute"
        ]

        result3 = execute_pipeline(
            basic_pipeline,
            run_config=run_config,
            mode="only_mode",
            tags={"dagster/is_memoized_run": "true"},
            instance=instance,
        )
        assert result3.success
        assert not get_step_keys_to_execute(instance, basic_pipeline, run_config, "only_mode")
def test_dev_loop_changing_versions():
    """Memoized re-execution of ``asset_pipeline`` recomputes only changed steps.

    Same dev-loop shape as the basic_pipeline variant: run memoized, mutate a
    solid's config in place, and assert ``get_step_keys_to_execute`` reports
    exactly the invalidated step. NOTE: statement order matters — run_config
    is mutated between assertions and memoization state lives in ``instance``.
    """
    with seven.TemporaryDirectory() as temp_dir:
        # Ephemeral persistent-type instance; all storages share temp_dir.
        run_store = SqliteRunStorage.from_local(temp_dir)
        event_store = ConsolidatedSqliteEventLogStorage(temp_dir)
        compute_log_manager = LocalComputeLogManager(temp_dir)
        instance = DagsterInstance(
            instance_type=InstanceType.PERSISTENT,
            local_artifact_storage=LocalArtifactStorage(temp_dir),
            run_storage=run_store,
            event_storage=event_store,
            compute_log_manager=compute_log_manager,
            run_launcher=DefaultRunLauncher(),
            run_coordinator=DefaultRunCoordinator(),
        )
        run_config = {
            "solids": {
                "create_string_1_asset": {"config": {"input_str": "apple"}},
                "take_string_1_asset": {"config": {"input_str": "apple"}},
            },
            "resources": {"object_manager": {"config": {"base_dir": temp_dir}}},
        }

        # First memoized run: everything executes; afterwards nothing is stale.
        result = execute_pipeline(
            asset_pipeline,
            run_config=run_config,
            mode="only_mode",
            tags={"dagster/is_memoized_run": "true"},
            instance=instance,
        )
        assert result.success
        assert not get_step_keys_to_execute(asset_pipeline, run_config, "only_mode")

        # Changing the downstream solid's config invalidates only that step.
        run_config["solids"]["take_string_1_asset"]["config"]["input_str"] = "banana"
        assert get_step_keys_to_execute(asset_pipeline, run_config, "only_mode") == [
            "take_string_1_asset"
        ]

        result = execute_pipeline(
            asset_pipeline,
            run_config=run_config,
            mode="only_mode",
            tags={"dagster/is_memoized_run": "true"},
            instance=instance,
        )
        assert result.success
        assert not get_step_keys_to_execute(asset_pipeline, run_config, "only_mode")