def test_bad_should_execute():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            instance = DagsterInstance.get()
            recon_repo = ReconstructableRepository.for_file(__file__, 'the_repo')
            bad_should_execute = recon_repo.get_reconstructable_schedule('bad_should_execute_schedule')

            result = sync_launch_scheduled_execution(bad_should_execute.get_origin())
            assert isinstance(result, ScheduledExecutionFailed)
            assert (
                'Error occurred during the execution of should_execute for schedule bad_should_execute_schedule'
                in result.errors[0].to_string()
            )
            assert not result.run_id

            ticks = instance.get_schedule_ticks(bad_should_execute.get_origin_id())
            assert ticks[0].status == ScheduleTickStatus.FAILURE
            assert (
                'Error occurred during the execution of should_execute for schedule bad_should_execute_schedule'
                in ticks[0].error.message
            )

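# The in-process tests in this module load a repository named 'the_repo' from this file,
# along with schedules such as 'simple_schedule', 'skip_schedule', 'wrong_config_schedule',
# 'bad_env_fn_schedule', and 'bad_should_execute_schedule'. Those definitions are not shown
# in this excerpt. The block below is only a minimal sketch of what two of them might look
# like, using dagster's @solid/@pipeline/@schedule/@repository decorators; the names, cron
# strings, and bodies are assumptions, not the actual definitions exercised by these tests.
from dagster import pipeline, repository, schedule, solid


@solid
def the_solid(_context):
    return 1


@pipeline
def the_pipeline():
    the_solid()


@schedule(cron_schedule="* * * * *", pipeline_name="the_pipeline")
def simple_schedule(_context):
    # Empty run config is assumed to be valid for the_pipeline in this sketch.
    return {}


@schedule(
    cron_schedule="* * * * *",
    pipeline_name="the_pipeline",
    should_execute=lambda _context: False,  # always skip, yielding ScheduledExecutionSkipped
)
def skip_schedule(_context):
    return {}


@repository
def the_repo():
    return [the_pipeline, simple_schedule, skip_schedule]
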
def test_bad_load():
    with _default_instance() as instance:
        working_directory = os.path.dirname(__file__)

        loadable_target_origin = LoadableTargetOrigin(
            executable_path=sys.executable,
            python_file=__file__,
            attribute="doesnt_exist",
            working_directory=working_directory,
        )
        repo_origin = ExternalRepositoryOrigin(
            ManagedGrpcPythonEnvRepositoryLocationOrigin(
                loadable_target_origin=loadable_target_origin
            ),
            "doesnt_exist",
        )
        schedule_origin = repo_origin.get_job_origin("also_doesnt_exist")

        result = sync_launch_scheduled_execution(schedule_origin)
        assert isinstance(result, ScheduledExecutionFailed)
        assert "doesnt_exist not found at module scope in file" in result.errors[0].to_string()

        ticks = instance.get_job_ticks(schedule_origin.get_id())
        assert ticks[0].status == JobTickStatus.FAILURE
        assert "doesnt_exist not found at module scope in file" in ticks[0].error.message

def test_schedules():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            with open(os.path.join(temp_dir, 'dagster.yaml'), 'w') as fd:
                yaml.dump(
                    {
                        'scheduler': {
                            'module': 'dagster.utils.test',
                            'class': 'FilesystemTestScheduler',
                            'config': {'base_dir': temp_dir},
                        }
                    },
                    fd,
                    default_flow_style=False,
                )

            recon_repo = ReconstructableRepository.from_legacy_repository_yaml(
                file_relative_path(__file__, '../repository.yaml')
            )

            for schedule_name in [
                'many_events_every_min',
                'pandas_hello_world_hourly',
            ]:
                schedule = recon_repo.get_reconstructable_schedule(schedule_name)
                result = sync_launch_scheduled_execution(schedule.get_origin())
                assert isinstance(result, ScheduledExecutionSuccess)

def test_launch_successful_execution_telemetry(schedule_origin_context):
    with _default_instance():
        with schedule_origin_context("simple_schedule") as schedule_origin:
            sync_launch_scheduled_execution(schedule_origin)

            event_log_path = "{logs_dir}/event.log".format(
                logs_dir=get_dir_from_dagster_home("logs")
            )
            with open(event_log_path, "r") as f:
                event_log = f.readlines()
                assert len(event_log) == 2

                message_start = json.loads(event_log[0])
                message_end = json.loads(event_log[1])

                assert message_start.get("action") == "_launch_scheduled_execution_started"
                assert message_end.get("action") == "_launch_scheduled_execution_ended"

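# Several tests here rely on a `_default_instance` context manager and a
# `schedule_origin_context` pytest fixture that are not shown in this excerpt. A minimal
# sketch of what they might look like, reusing only calls that already appear in this
# module (`instance_for_test`, `ReconstructableRepository.for_file`,
# `get_reconstructable_schedule`, `get_origin`, all assumed imported at module scope);
# the wiring below is an assumption, not the actual helpers from the test suite.
from contextlib import contextmanager

import pytest


@contextmanager
def _default_instance():
    # Assumed to provide a temporary DAGSTER_HOME and a DagsterInstance for the test.
    with instance_for_test() as instance:
        yield instance


@pytest.fixture(name="schedule_origin_context")
def schedule_origin_context_fixture():
    @contextmanager
    def _schedule_origin_context(schedule_name):
        # Assumed to resolve a schedule from the in-file repository ('the_repo') and
        # hand back its origin, mirroring the in-process tests in this module.
        recon_repo = ReconstructableRepository.for_file(__file__, "the_repo")
        schedule = recon_repo.get_reconstructable_schedule(schedule_name)
        yield schedule.get_origin()

    return _schedule_origin_context
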
def test_bad_load():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            instance = DagsterInstance.get()
            recon_repo = ReconstructableRepository.for_file(__file__, 'doesnt_exist')
            schedule = recon_repo.get_reconstructable_schedule('also_doesnt_exist')

            with pytest.raises(DagsterSubprocessError):
                sync_launch_scheduled_execution(schedule.get_origin())

            ticks = instance.get_schedule_ticks(schedule.get_origin_id())
            assert ticks[0].status == ScheduleTickStatus.FAILURE
            assert 'doesnt_exist not found at module scope in file' in ticks[0].error.message

def test_skip(schedule_origin_context):
    with _default_instance() as instance:
        with schedule_origin_context("skip_schedule") as schedule_origin:
            result = sync_launch_scheduled_execution(schedule_origin)
            assert isinstance(result, ScheduledExecutionSkipped)

            ticks = instance.get_schedule_ticks(schedule_origin.get_id())
            assert ticks[0].status == ScheduleTickStatus.SKIPPED

def test_skip(schedule_origin_context):
    with schedule_origin_context('skip_schedule') as schedule_origin:
        instance = DagsterInstance.get()
        result = sync_launch_scheduled_execution(schedule_origin)
        assert isinstance(result, ScheduledExecutionSkipped)

        ticks = instance.get_schedule_ticks(schedule_origin.get_id())
        assert ticks[0].status == ScheduleTickStatus.SKIPPED

def test_bad_load_grpc():
    with _default_instance() as instance:
        with grpc_schedule_origin("doesnt_exist") as schedule_origin:
            result = sync_launch_scheduled_execution(schedule_origin)
            assert isinstance(result, ScheduledExecutionFailed)
            assert "Could not find schedule named doesnt_exist" in result.errors[0].to_string()

            ticks = instance.get_schedule_ticks(schedule_origin.get_id())
            assert ticks[0].status == ScheduleTickStatus.FAILURE
            assert "Could not find schedule named doesnt_exist" in ticks[0].error.message

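# `grpc_schedule_origin` (used by test_bad_load_grpc above) is another context manager not
# shown in this excerpt: unlike test_grpc_server_down, which points at a dead port, it needs
# a live gRPC server hosting the repository. A rough sketch of what it might look like,
# assuming dagster's GrpcServerProcess API and the LoadableTargetOrigin /
# RepositoryGrpcServerOrigin types used elsewhere in this module; the import path, method
# names, and wiring are assumptions for the dagster version in use.
from dagster.grpc.server import GrpcServerProcess  # assumed import path


@contextmanager
def grpc_schedule_origin(schedule_name):
    loadable_target_origin = LoadableTargetOrigin(
        executable_path=sys.executable, python_file=__file__, attribute="the_repo"
    )
    server_process = GrpcServerProcess(loadable_target_origin=loadable_target_origin)
    # The ephemeral client is assumed to shut the server process down when the
    # context exits.
    with server_process.create_ephemeral_client() as api_client:
        repo_origin = RepositoryGrpcServerOrigin(
            host="localhost",
            port=api_client.port,
            socket=api_client.socket,
            repository_name="the_repo",
        )
        yield repo_origin.get_schedule_origin(schedule_name)
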
def test_launch_scheduled_execution(schedule_origin_context):
    with schedule_origin_context('simple_schedule') as schedule_origin:
        instance = DagsterInstance.get()
        result = sync_launch_scheduled_execution(schedule_origin)
        assert isinstance(result, ScheduledExecutionSuccess)

        run = instance.get_run_by_id(result.run_id)
        assert run is not None

        ticks = instance.get_schedule_ticks(schedule_origin.get_id())
        assert ticks[0].status == ScheduleTickStatus.SUCCESS

def test_launch_successful_execution(schedule_origin_context):
    with _default_instance() as instance:
        with schedule_origin_context("simple_schedule") as schedule_origin:
            result = sync_launch_scheduled_execution(schedule_origin)
            assert isinstance(result, ScheduledExecutionSuccess)

            run = instance.get_run_by_id(result.run_id)
            assert run is not None

            ticks = instance.get_schedule_ticks(schedule_origin.get_id())
            assert ticks[0].status == ScheduleTickStatus.SUCCESS

def test_wrong_config(schedule_origin_context):
    with schedule_origin_context('wrong_config_schedule') as schedule_origin:
        instance = DagsterInstance.get()
        result = sync_launch_scheduled_execution(schedule_origin)
        assert isinstance(result, ScheduledExecutionFailed)
        assert 'DagsterInvalidConfigError' in result.errors[0].to_string()

        run = instance.get_run_by_id(result.run_id)
        assert run.is_failure

        ticks = instance.get_schedule_ticks(schedule_origin.get_id())
        assert ticks[0].status == ScheduleTickStatus.SUCCESS

def test_wrong_config(schedule_origin_context):
    with _default_instance() as instance:
        with schedule_origin_context("wrong_config_schedule") as schedule_origin:
            result = sync_launch_scheduled_execution(schedule_origin)
            assert isinstance(result, ScheduledExecutionFailed)
            assert "DagsterInvalidConfigError" in result.errors[0].to_string()

            run = instance.get_run_by_id(result.run_id)
            assert run.is_failure

            ticks = instance.get_job_ticks(schedule_origin.get_id())
            assert ticks[0].status == JobTickStatus.SUCCESS

def test_skip():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            instance = DagsterInstance.get()
            recon_repo = ReconstructableRepository.for_file(__file__, 'the_repo')
            skip = recon_repo.get_reconstructable_schedule('skip_schedule')

            result = sync_launch_scheduled_execution(skip.get_origin())
            assert isinstance(result, ScheduledExecutionSkipped)

            ticks = instance.get_schedule_ticks(skip.get_origin_id())
            assert ticks[0].status == ScheduleTickStatus.SKIPPED

def test_bad_load():
    with _default_instance() as instance:
        working_directory = os.path.dirname(__file__)
        recon_repo = ReconstructableRepository.for_file(__file__, "doesnt_exist", working_directory)
        schedule = recon_repo.get_reconstructable_schedule("also_doesnt_exist")

        result = sync_launch_scheduled_execution(schedule.get_origin())
        assert isinstance(result, ScheduledExecutionFailed)
        assert "doesnt_exist not found at module scope in file" in result.errors[0].to_string()

        ticks = instance.get_schedule_ticks(schedule.get_origin_id())
        assert ticks[0].status == ScheduleTickStatus.FAILURE
        assert "doesnt_exist not found at module scope in file" in ticks[0].error.message

def test_grpc_server_down():
    with _default_instance() as instance:
        down_grpc_repo_origin = RepositoryGrpcServerOrigin(
            host="localhost", port=find_free_port(), socket=None, repository_name="down_repo"
        )
        down_grpc_schedule_origin = down_grpc_repo_origin.get_schedule_origin("down_schedule")

        result = sync_launch_scheduled_execution(down_grpc_schedule_origin)
        assert isinstance(result, ScheduledExecutionFailed)
        assert "failed to connect to all addresses" in result.errors[0].to_string()

        ticks = instance.get_schedule_ticks(down_grpc_schedule_origin.get_id())
        assert ticks[0].status == ScheduleTickStatus.FAILURE
        assert "failed to connect to all addresses" in ticks[0].error.message

def test_launch_scheduled_execution():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            instance = DagsterInstance.get()
            recon_repo = ReconstructableRepository.for_file(__file__, 'the_repo')
            simple = recon_repo.get_reconstructable_schedule('simple_schedule')

            result = sync_launch_scheduled_execution(simple.get_origin())
            assert isinstance(result, ScheduledExecutionSuccess)

            run = instance.get_run_by_id(result.run_id)
            assert run.is_success

            ticks = instance.get_schedule_ticks(simple.get_origin_id())
            assert ticks[0].status == ScheduleTickStatus.SUCCESS

def test_bad_should_execute(schedule_origin_context):
    with _default_instance() as instance:
        with schedule_origin_context("bad_should_execute_schedule") as schedule_origin:
            result = sync_launch_scheduled_execution(schedule_origin)
            assert isinstance(result, ScheduledExecutionFailed)
            assert (
                "Error occurred during the execution of should_execute for schedule bad_should_execute_schedule"
                in result.errors[0].to_string()
            )
            assert not result.run_id

            ticks = instance.get_schedule_ticks(schedule_origin.get_id())
            assert ticks[0].status == ScheduleTickStatus.FAILURE
            assert (
                "Error occurred during the execution of should_execute for schedule bad_should_execute_schedule"
                in ticks[0].error.message
            )

def test_bad_env_fn(schedule_origin_context):
    with schedule_origin_context('bad_env_fn_schedule') as schedule_origin:
        instance = DagsterInstance.get()
        result = sync_launch_scheduled_execution(schedule_origin)
        assert isinstance(result, ScheduledExecutionFailed)
        assert (
            'Error occurred during the execution of run_config_fn for schedule bad_env_fn_schedule'
            in result.errors[0].to_string()
        )
        assert not result.run_id

        ticks = instance.get_schedule_ticks(schedule_origin.get_id())
        assert ticks[0].status == ScheduleTickStatus.FAILURE
        assert (
            'Error occurred during the execution of run_config_fn for schedule bad_env_fn_schedule'
            in ticks[0].error.message
        )

def test_wrong_config():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            instance = DagsterInstance.get()
            recon_repo = ReconstructableRepository.for_file(__file__, 'the_repo')
            wrong_config = recon_repo.get_reconstructable_schedule('wrong_config_schedule')

            result = sync_launch_scheduled_execution(wrong_config.get_origin())
            assert isinstance(result, ScheduledExecutionFailed)
            assert 'DagsterInvalidConfigError' in result.errors[0].to_string()

            run = instance.get_run_by_id(result.run_id)
            assert run.is_failure

            ticks = instance.get_schedule_ticks(wrong_config.get_origin_id())
            assert ticks[0].status == ScheduleTickStatus.SUCCESS

def test_bad_load():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            instance = DagsterInstance.get()
            working_directory = os.path.dirname(__file__)
            recon_repo = ReconstructableRepository.for_file(__file__, 'doesnt_exist', working_directory)
            schedule = recon_repo.get_reconstructable_schedule('also_doesnt_exist')

            result = sync_launch_scheduled_execution(schedule.get_origin())
            assert isinstance(result, ScheduledExecutionFailed)
            assert 'doesnt_exist not found at module scope in file' in result.errors[0].to_string()

            ticks = instance.get_schedule_ticks(schedule.get_origin_id())
            assert ticks[0].status == ScheduleTickStatus.FAILURE
            assert 'doesnt_exist not found at module scope in file' in ticks[0].error.message

def test_launch_failure(schedule_origin_context):
    with instance_for_test(
        overrides={
            "run_launcher": {
                "module": "dagster.core.test_utils",
                "class": "ExplodingRunLauncher",
            },
        },
    ) as instance:
        with schedule_origin_context("simple_schedule") as schedule_origin:
            result = sync_launch_scheduled_execution(schedule_origin)
            assert isinstance(result, ScheduledExecutionFailed)
            assert "NotImplementedError" in result.errors[0].to_string()

            run = instance.get_run_by_id(result.run_id)
            assert run is not None
            assert run.status == PipelineRunStatus.FAILURE

            ticks = instance.get_job_ticks(schedule_origin.get_id())
            assert ticks[0].status == JobTickStatus.SUCCESS

def test_bad_env_fn():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            instance = DagsterInstance.get()
            recon_repo = ReconstructableRepository.for_file(__file__, 'the_repo')
            bad_env_fn = recon_repo.get_reconstructable_schedule('bad_env_fn_schedule')

            result = sync_launch_scheduled_execution(bad_env_fn.get_origin())
            assert isinstance(result, ScheduledExecutionFailed)
            assert (
                'Error occurred during the execution of run_config_fn for schedule bad_env_fn_schedule'
                in result.errors[0].to_string()
            )

            run = instance.get_run_by_id(result.run_id)
            assert run.is_failure

            ticks = instance.get_schedule_ticks(bad_env_fn.get_origin_id())
            assert ticks[0].status == ScheduleTickStatus.SUCCESS

def test_grpc_server_down():
    with seven.TemporaryDirectory() as temp_dir:
        with environ({'DAGSTER_HOME': temp_dir}):
            down_grpc_repo_origin = RepositoryGrpcServerOrigin(
                host='localhost',
                port=find_free_port(),
                socket=None,
                repository_name='down_repo',
            )
            down_grpc_schedule_origin = down_grpc_repo_origin.get_schedule_origin('down_schedule')

            instance = DagsterInstance.get()
            result = sync_launch_scheduled_execution(down_grpc_schedule_origin)
            assert isinstance(result, ScheduledExecutionFailed)
            assert 'failed to connect to all addresses' in result.errors[0].to_string()

            ticks = instance.get_schedule_ticks(down_grpc_schedule_origin.get_id())
            assert ticks[0].status == ScheduleTickStatus.FAILURE
            assert 'failed to connect to all addresses' in ticks[0].error.message