def test_start_schedule_manual_delete_debug(
    restore_cron_tab, snapshot  # pylint:disable=unused-argument,redefined-outer-name
):
    """A cron job deleted behind the scheduler's back should surface in debug info."""
    with TemporaryDirectory() as temp_dir:
        repo = RepositoryDefinition(name="test_repository", schedule_defs=define_schedules())
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        reconcile_scheduler_state(
            python_path="fake path",
            repository_path="",
            repository=repo,
            instance=instance,
        )

        instance.start_schedule(repo, "no_config_pipeline_every_min_schedule")

        # Remove the cron entry directly, bypassing schedule storage
        instance.scheduler._end_cron_job(  # pylint: disable=protected-access
            instance,
            repo,
            instance.get_schedule_by_name(repo, "no_config_pipeline_every_min_schedule"),
        )

        # The resulting storage/crontab inconsistency should show in the debug snapshot
        snapshot.assert_match(instance.scheduler_debug_info())
def test_stop_schedule_unsuccessful(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """If the cron entry cannot be removed, stop raises DagsterInvariantViolationError."""
    with TemporaryDirectory() as temp_dir:
        repo = RepositoryDefinition(name="test_repository", schedule_defs=define_schedules())
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path="",
            repository=repo,
            instance=instance,
        )

        def do_nothing(*_):
            pass

        # Neuter cron removal so the entry is left behind
        instance._scheduler._end_cron_job = do_nothing  # pylint: disable=protected-access

        instance.start_schedule(repo, "no_config_pipeline_every_min_schedule")

        # End schedule
        with pytest.raises(
            DagsterInvariantViolationError,
            match="Attempted to remove cron job for schedule no_config_pipeline_every_min_schedule, but failed.",
        ):
            instance.stop_schedule(repo, "no_config_pipeline_every_min_schedule")
def test_start_schedule_manual_add_debug(
    restore_cron_tab, snapshot  # pylint:disable=unused-argument,redefined-outer-name
):
    """A cron entry added behind the scheduler's back is flagged, then fixed by reconcile."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        repo = get_test_external_repo()

        # Initialize scheduler
        instance.reconcile_scheduler_state(repo)

        # Add a cron entry directly, without going through schedule storage
        instance.scheduler._start_cron_job(  # pylint: disable=protected-access
            instance,
            repo.get_external_schedule("no_config_pipeline_every_min_schedule"),
        )

        # Debug info should report exactly one inconsistency
        assert len(instance.scheduler_debug_info().errors) == 1

        # Reconciling again should clear it
        instance.reconcile_scheduler_state(repo)
        assert len(instance.scheduler_debug_info().errors) == 0
def check_cli_execute_file_pipeline(path, pipeline_fn_name, env_file=None):
    """Run `dagster pipeline execute` on *path* in a subprocess; raise on failure."""
    from dagster.core.test_utils import environ

    with TemporaryDirectory() as temp_dir:
        # Point DAGSTER_HOME at a scratch directory for the duration of the run
        with environ({'DAGSTER_HOME': temp_dir}):
            cli_cmd = [
                sys.executable,
                '-m',
                'dagster',
                'pipeline',
                'execute',
                '-f',
                path,
                '-a',
                pipeline_fn_name,
            ]
            if env_file:
                cli_cmd += ['-c', env_file]
            try:
                subprocess.check_output(cli_cmd)
            except subprocess.CalledProcessError as cpe:
                # Surface the captured process error before propagating it
                print(cpe)  # pylint: disable=print-call
                raise cpe
def test_re_init():
    """Running reconcile twice is safe and leaves persisted schedules valid."""
    with TemporaryDirectory() as temp_dir:
        repo = RepositoryDefinition(name="test_repository", schedule_defs=define_schedules())
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path="",
            repository=repo,
            instance=instance,
        )

        # Start schedule
        instance.start_schedule(repo, "no_config_pipeline_every_min_schedule")

        # Re-initialize scheduler
        reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path="",
            repository=repo,
            instance=instance,
        )

        # Check schedules are saved to disk with a valid interpreter path
        assert 'schedules' in os.listdir(temp_dir)
        for stored_schedule in instance.all_schedules(repo):
            assert "/bin/python" in stored_schedule.python_path
def test_start_non_existent_schedule(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """Stopping a schedule that was never registered raises DagsterScheduleDoesNotExist."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with pytest.raises(DagsterScheduleDoesNotExist):
            instance.stop_schedule_and_update_storage_state("asdf")
def test_log_directory(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Log paths follow schedules/logs/<repo>/<schedule>; wiping clears all schedules."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)

        log_path = instance.logs_path_for_schedule(
            test_repository.name, "no_config_pipeline_every_min_schedule"
        )
        expected_suffix = "/schedules/logs/{repository_name}/{schedule_name}/scheduler.log".format(
            repository_name=test_repository.name,
            schedule_name="no_config_pipeline_every_min_schedule",
        )
        assert log_path.endswith(expected_suffix)

        # Initialize scheduler
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )

        # Start schedule, then wipe everything
        instance.start_schedule_and_update_storage_state(
            test_repository.name, "no_config_pipeline_every_min_schedule"
        )
        instance.wipe_all_schedules()

        # Nothing should remain after the wipe
        assert instance.all_schedules(test_repository.name) == []
def test_start_and_stop_schedule():
    """Starting a schedule creates its cron script on disk; stopping removes it."""
    with TemporaryDirectory() as temp_dir:
        repo = RepositoryDefinition(name="test_repository", schedule_defs=define_schedules())
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path="",
            repository=repo,
            instance=instance,
        )

        schedule_def = repo.get_schedule_def("no_config_pipeline_every_min_schedule")
        script_name = "{}.{}.sh".format(repo.name, schedule_def.name)
        scripts_dir = os.path.join(temp_dir, 'schedules', 'scripts')

        # Start schedule
        schedule = instance.start_schedule(repo, "no_config_pipeline_every_min_schedule")
        check.inst_param(schedule, 'schedule', Schedule)
        assert "/bin/python" in schedule.python_path

        assert 'schedules' in os.listdir(temp_dir)
        assert script_name in os.listdir(scripts_dir)

        # End schedule
        instance.stop_schedule(repo, "no_config_pipeline_every_min_schedule")
        assert script_name not in os.listdir(scripts_dir)
def test_start_schedule_fails(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """If the cron write blows up, the schedule must stay STOPPED in storage."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )
        schedule_def = test_repository.get_schedule_def("no_config_pipeline_every_min_schedule")

        def raises(*args, **kwargs):
            raise Exception('Patch')

        # Make the underlying cron installation fail
        instance._scheduler._start_cron_job = raises  # pylint: disable=protected-access

        with pytest.raises(Exception, match='Patch'):
            instance.start_schedule_and_update_storage_state(
                test_repository.name, "no_config_pipeline_every_min_schedule"
            )

        schedule = instance.get_schedule_by_name(test_repository.name, schedule_def.name)
        assert schedule.status == ScheduleStatus.STOPPED
def test_stop_schedule_unsuccessful(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """If the cron entry survives removal, stopping raises DagsterSchedulerError."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )

        def do_nothing(*_):
            pass

        # Neuter cron removal so the job is left behind
        instance._scheduler._end_cron_job = do_nothing  # pylint: disable=protected-access

        instance.start_schedule_and_update_storage_state(
            test_repository.name, "no_config_pipeline_every_min_schedule"
        )

        # End schedule
        with pytest.raises(
            DagsterSchedulerError,
            match="Attempted to remove existing cron job for schedule "
            "no_config_pipeline_every_min_schedule, but failed. There are still 1 jobs running for "
            "the schedule.",
        ):
            instance.stop_schedule_and_update_storage_state(
                test_repository.name, "no_config_pipeline_every_min_schedule"
            )
def test_start_and_stop_schedule(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """Round-trip a schedule: start writes the cron script, stop deletes it."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )
        schedule_def = test_repository.get_schedule_def("no_config_pipeline_every_min_schedule")

        schedule = instance.start_schedule_and_update_storage_state(
            test_repository.name, "no_config_pipeline_every_min_schedule"
        )
        check.inst_param(schedule, 'schedule', Schedule)
        assert "/bin/python" in schedule.python_path

        script_name = "{}.{}.sh".format(test_repository.name, schedule_def.name)
        scripts_dir = os.path.join(temp_dir, 'schedules', 'scripts')
        assert 'schedules' in os.listdir(temp_dir)
        assert script_name in os.listdir(scripts_dir)

        instance.stop_schedule_and_update_storage_state(
            test_repository.name, "no_config_pipeline_every_min_schedule"
        )
        assert script_name not in os.listdir(scripts_dir)
def test_re_init(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Reconciling twice keeps stored schedules pointing at a valid interpreter."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )

        # Start schedule
        instance.start_schedule_and_update_storage_state(
            test_repository.name, "no_config_pipeline_every_min_schedule"
        )

        # Re-initialize scheduler
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )

        # Schedules should still be persisted on disk with a valid interpreter path
        assert 'schedules' in os.listdir(temp_dir)
        for stored_schedule in instance.all_schedules(test_repository.name):
            assert "/bin/python" in stored_schedule.python_path
def test_start_schedule_fails(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """A failed cron installation must leave the schedule state STOPPED."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            # Initialize scheduler
            instance.reconcile_scheduler_state(repo)

            def raises(*args, **kwargs):
                raise Exception("Patch")

            # Make the underlying cron installation fail
            instance._scheduler._start_cron_job = raises  # pylint: disable=protected-access

            external_schedule = repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            with pytest.raises(Exception, match="Patch"):
                instance.start_schedule_and_update_storage_state(external_schedule)

            schedule = instance.get_schedule_state(external_schedule.get_origin_id())
            assert schedule.status == ScheduleStatus.STOPPED
def test_re_init(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Re-reconciling must not disturb an already-running schedule's stored state."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            now = get_current_datetime_in_utc()

            # Initialize scheduler
            instance.reconcile_scheduler_state(repo)

            # Start schedule and pin down its recorded start time
            started_state = instance.start_schedule_and_update_storage_state(
                repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            )
            assert started_state.start_timestamp == get_timestamp_from_utc_datetime(now)

            # Re-initialize scheduler
            instance.reconcile_scheduler_state(repo)

            # Stored state for the running schedule should be untouched
            assert "schedules" in os.listdir(temp_dir)
            for state in instance.all_stored_schedule_state():
                if state.name == "no_config_pipeline_every_min_schedule":
                    assert state == started_state
def test_reconcile_failure(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Reconciliation should aggregate per-schedule scheduler failures into one error."""
    with TemporaryDirectory() as tempdir:
        instance = define_scheduler_instance(tempdir)
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            # Fix: sibling tests reference '.../repository.yaml'; '.yam' was a typo
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )
        instance.start_schedule_and_update_storage_state(
            test_repository.name, "no_config_pipeline_every_min_schedule"
        )

        def failed_start_job(*_):
            raise DagsterSchedulerError("Failed to start")

        def failed_end_job(*_):
            raise DagsterSchedulerError("Failed to stop")

        # Force every start/stop attempt during reconciliation to fail
        instance._scheduler.start_schedule = failed_start_job  # pylint: disable=protected-access
        instance._scheduler.stop_schedule = failed_end_job  # pylint: disable=protected-access

        # Initialize scheduler: one numbered error per schedule is expected
        with pytest.raises(
            DagsterScheduleReconciliationError,
            match="Error 1: Failed to stop\n Error 2: Failed to stop\n Error 3: Failed to stop",
        ):
            instance.reconcile_scheduler_state(
                python_path=sys.executable,
                repository_path=file_relative_path(__file__, '.../repository.yaml'),
                repository=test_repository,
            )
def test_start_and_stop_schedule():
    """Legacy scheduler-handle API: start writes the schedule script, stop removes it."""
    with TemporaryDirectory() as temp_dir:
        instance = DagsterInstance.local_temp(tempdir=temp_dir)
        scheduler_handle = define_scheduler(instance.schedules_directory(), 'test_repository')
        assert scheduler_handle

        # Initialize scheduler
        scheduler_handle.up(python_path=sys.executable, repository_path="")
        scheduler = scheduler_handle.get_scheduler()
        schedule_def = scheduler_handle.get_schedule_def_by_name(
            "no_config_pipeline_every_min_schedule"
        )

        # Start schedule
        schedule = scheduler.start_schedule("no_config_pipeline_every_min_schedule")
        check.inst_param(schedule, 'schedule', Schedule)
        assert "/bin/python" in schedule.python_path

        script_name = "{}_{}.sh".format(schedule_def.name, schedule.schedule_id)
        schedules_dir = os.path.join(temp_dir, 'schedules')
        assert 'schedules' in os.listdir(temp_dir)
        assert script_name in os.listdir(schedules_dir)

        # End schedule
        scheduler.stop_schedule("no_config_pipeline_every_min_schedule")
        assert script_name not in os.listdir(schedules_dir)
def test_reconcile_failure_when_deleting_schedule_def(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """Failures while deleting removed schedule defs surface as a reconciliation error."""
    with TemporaryDirectory() as tempdir:
        instance = define_scheduler_instance(tempdir)

        # Initialize scheduler
        instance.reconcile_scheduler_state(
            python_path=sys.executable,
            # Fix: sibling tests reference '.../repository.yaml'; '.yam' was a typo
            repository_path=file_relative_path(__file__, '.../repository.yaml'),
            repository=test_repository,
        )

        assert len(instance.all_schedules(repository_name=test_repository.name)) == 3

        def failed_end_job(*_):
            raise DagsterSchedulerError("Failed to stop")

        # Force the delete path to fail when a schedule def disappears
        instance._scheduler.stop_schedule_and_delete_from_storage = (  # pylint: disable=protected-access
            failed_end_job
        )

        with pytest.raises(
            DagsterScheduleReconciliationError,
            match="Error 1: Failed to stop",
        ):
            # smaller_repository is missing one schedule, which triggers a delete
            instance.reconcile_scheduler_state(
                python_path=sys.executable,
                repository_path=file_relative_path(__file__, '.../repository.yaml'),
                repository=smaller_repository,
            )
def test_start_schedule_fails():
    """When cron installation raises, storage must still show the schedule STOPPED."""
    with TemporaryDirectory() as temp_dir:
        repo = RepositoryDefinition(name="test_repository", schedule_defs=define_schedules())
        instance = define_scheduler_instance(temp_dir)

        # Initialize scheduler
        reconcile_scheduler_state(
            python_path=sys.executable,
            repository_path="",
            repository=repo,
            instance=instance,
        )
        schedule_def = repo.get_schedule_def("no_config_pipeline_every_min_schedule")

        def raises(*args, **kwargs):
            raise Exception('Patch')

        # Make the underlying cron installation fail
        instance._scheduler._start_cron_job = raises  # pylint: disable=protected-access

        with pytest.raises(Exception, match='Patch'):
            instance.start_schedule(repo, "no_config_pipeline_every_min_schedule")

        schedule = instance.get_schedule_by_name(repo, schedule_def.name)
        assert schedule.status == ScheduleStatus.STOPPED
def test_log_directory(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Log paths are keyed by schedule origin id; wiping removes all stored job state."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            external_schedule = repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            origin_id = external_schedule.get_external_origin_id()

            log_path = instance.logs_path_for_schedule(origin_id)
            assert log_path.endswith(
                "/schedules/logs/{schedule_origin_id}/scheduler.log".format(
                    schedule_origin_id=origin_id
                )
            )

            # Start schedule
            instance.start_schedule_and_update_storage_state(external_schedule)

            # Wipe scheduler
            instance.wipe_all_schedules()

            # No schedule job state should survive the wipe
            assert instance.all_stored_job_state(job_type=JobType.SCHEDULE) == []
def test_start_and_end_schedule():
    """Oldest API: starting writes a json+sh pair into schedule_dir; ending removes both."""
    with TemporaryDirectory() as schedule_dir:
        repo = create_repository()

        # Start schedule
        schedule_def = repo.get_schedule("no_config_pipeline_hourly_schedule")
        scheduler = repo.build_scheduler(schedule_dir=schedule_dir)
        assert scheduler

        schedule = scheduler.start_schedule(schedule_def, sys.executable, "")
        check.inst_param(schedule, 'schedule', RunningSchedule)
        assert schedule.schedule_definition == schedule_def
        assert "/bin/python" in schedule.python_path

        json_name = "{}_{}.json".format(schedule_def.name, schedule.schedule_id)
        script_name = "{}_{}.sh".format(schedule_def.name, schedule.schedule_id)
        assert json_name in os.listdir(schedule_dir)
        assert script_name in os.listdir(schedule_dir)

        # End schedule
        scheduler.end_schedule(schedule_def)
        assert json_name not in os.listdir(schedule_dir)
        assert script_name not in os.listdir(schedule_dir)
def test_start_and_end_schedule():
    """Feature-flagged scheduler: start writes json+sh under schedules/, end removes them."""
    with TemporaryDirectory() as tempdir:
        instance = DagsterInstance.local_temp(tempdir=tempdir, features=['scheduler'])
        scheduler = define_scheduler(instance.schedules_directory())
        assert scheduler

        # Start schedule
        schedule_def = scheduler.get_schedule_def("no_config_pipeline_hourly_schedule")
        schedule = scheduler.start_schedule(schedule_def, sys.executable, "")
        check.inst_param(schedule, 'schedule', RunningSchedule)
        assert schedule.schedule_definition == schedule_def
        assert "/bin/python" in schedule.python_path

        json_name = "{}_{}.json".format(schedule_def.name, schedule.schedule_id)
        script_name = "{}_{}.sh".format(schedule_def.name, schedule.schedule_id)
        schedules_dir = os.path.join(tempdir, 'schedules')
        assert 'schedules' in os.listdir(tempdir)
        assert json_name in os.listdir(schedules_dir)
        assert script_name in os.listdir(schedules_dir)

        # End schedule
        scheduler.end_schedule(schedule_def)
        # Fix: the files live under schedules/, so check that directory; the old
        # assertions inspected tempdir itself and passed vacuously.
        assert json_name not in os.listdir(schedules_dir)
        assert script_name not in os.listdir(schedules_dir)
def test_reconcile_failure(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Reconcile aggregates every per-schedule failure into one reconciliation error."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            instance.reconcile_scheduler_state(repo)
            instance.start_schedule_and_update_storage_state(
                repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            )

            def failed_start_job(*_):
                raise DagsterSchedulerError("Failed to start")

            def failed_end_job(*_):
                raise DagsterSchedulerError("Failed to stop")

            # Force every start/stop attempt during reconciliation to fail
            instance._scheduler.start_schedule = failed_start_job  # pylint: disable=protected-access
            instance._scheduler.stop_schedule = failed_end_job  # pylint: disable=protected-access

            with pytest.raises(
                DagsterScheduleReconciliationError,
                match="Error 1: Failed to stop\n Error 2: Failed to stop\n Error 3: Failed to stop",
            ):
                instance.reconcile_scheduler_state(repo)
def test_reconcile_schedule_without_start_time():
    """A legacy RUNNING job state lacking a start timestamp gets one on reconcile."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            external_schedule = repo.get_external_schedule("no_config_pipeline_daily_schedule")

            # Simulate pre-migration state: running, but start_timestamp is None
            legacy_schedule_state = JobState(
                external_schedule.get_external_origin(),
                JobType.SCHEDULE,
                JobStatus.RUNNING,
                ScheduleJobData(external_schedule.cron_schedule, None),
            )
            instance.add_job_state(legacy_schedule_state)

            instance.reconcile_scheduler_state(external_repository=repo)

            reconciled = instance.get_job_state(external_schedule.get_external_origin_id())
            assert reconciled.status == JobStatus.RUNNING
            # Reconcile backfills the missing timestamp with the current time
            assert reconciled.job_specific_data.start_timestamp == get_timestamp_from_utc_datetime(
                get_current_datetime_in_utc()
            )
def test_stop_schedule_fails(
    restore_cron_tab,  # pylint:disable=unused-argument,redefined-outer-name
):
    """If cron removal raises, the schedule must remain RUNNING in storage."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            external_schedule = repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            schedule_origin_id = external_schedule.get_external_origin_id()

            def raises(*args, **kwargs):
                raise Exception("Patch")

            # Make cron removal blow up
            instance._scheduler._end_cron_job = raises  # pylint: disable=protected-access

            instance.start_schedule_and_update_storage_state(external_schedule)
            assert "schedules" in os.listdir(temp_dir)
            assert "{}.sh".format(schedule_origin_id) in os.listdir(
                os.path.join(temp_dir, "schedules", "scripts")
            )

            # End schedule
            with pytest.raises(Exception, match="Patch"):
                instance.stop_schedule_and_update_storage_state(schedule_origin_id)

            assert instance.get_job_state(schedule_origin_id).status == JobStatus.RUNNING
def test_stop_schedule_unsuccessful(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """A cron job that survives removal makes stop raise DagsterSchedulerError."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:

            def do_nothing(*_):
                pass

            # Neuter cron removal so the job is left behind
            instance._scheduler._end_cron_job = do_nothing  # pylint: disable=protected-access

            external_schedule = repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            instance.start_schedule_and_update_storage_state(external_schedule)

            # End schedule
            with pytest.raises(
                DagsterSchedulerError,
                match="Attempted to remove existing cron job for schedule "
                "no_config_pipeline_every_min_schedule, but failed. There are still 1 jobs running for "
                "the schedule.",
            ):
                instance.stop_schedule_and_update_storage_state(
                    external_schedule.get_external_origin_id()
                )
def test_start_schedule_manual_duplicate_schedules_add_debug(
    restore_cron_tab, snapshot  # pylint:disable=unused-argument,redefined-outer-name
):
    """Duplicate cron entries for one schedule are flagged and fixed by reconcile."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            external_schedule = repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            instance.start_schedule_and_update_storage_state(external_schedule)

            # Manually install two extra cron entries for the same schedule
            for _ in range(2):
                instance.scheduler._start_cron_job(  # pylint: disable=protected-access
                    instance,
                    external_schedule,
                )

            # Debug info should report exactly one duplicate-entry error
            assert len(instance.scheduler_debug_info().errors) == 1

            # Reconciling should repair the crontab
            instance.reconcile_scheduler_state(repo)
            assert len(instance.scheduler_debug_info().errors) == 0
def test_start_and_stop_schedule(
    restore_cron_tab,
    do_initial_reconcile,
):  # pylint:disable=unused-argument,redefined-outer-name
    """Start writes <origin_id>.sh under schedules/scripts; stop deletes it."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            if do_initial_reconcile:
                instance.reconcile_scheduler_state(repo)

            schedule = repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            schedule_origin_id = schedule.get_external_origin_id()
            script_name = "{}.sh".format(schedule_origin_id)
            scripts_dir = os.path.join(temp_dir, "schedules", "scripts")

            instance.start_schedule_and_update_storage_state(schedule)
            assert "schedules" in os.listdir(temp_dir)
            assert script_name in os.listdir(scripts_dir)

            instance.stop_schedule_and_update_storage_state(schedule_origin_id)
            assert script_name not in os.listdir(scripts_dir)
def test_add_schedule_def(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Reconciling against a larger repo stores new schedules without starting them."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)

        with get_smaller_external_repo() as smaller_repo:
            # Start both schedules from the smaller repo and verify crontab + storage
            instance.start_schedule_and_update_storage_state(
                smaller_repo.get_external_schedule("no_config_pipeline_daily_schedule")
            )
            instance.start_schedule_and_update_storage_state(
                smaller_repo.get_external_schedule("no_config_pipeline_every_min_schedule")
            )
            assert len(instance.all_stored_schedule_state()) == 2
            assert len(get_cron_jobs()) == 2
            assert len(instance.scheduler_debug_info().errors) == 0

        with get_test_external_repo() as bigger_repo:
            # Reconcile with an additional schedule added: stored but not yet running
            instance.reconcile_scheduler_state(bigger_repo)
            assert len(instance.all_stored_schedule_state()) == 3
            assert len(get_cron_jobs()) == 2
            assert len(instance.scheduler_debug_info().errors) == 0

            # Starting the new schedule adds the third cron job
            instance.start_schedule_and_update_storage_state(
                bigger_repo.get_external_schedule("default_config_pipeline_every_min_schedule")
            )
            assert len(instance.all_stored_schedule_state()) == 3
            assert len(get_cron_jobs()) == 3
            assert len(instance.scheduler_debug_info().errors) == 0
def test_init(restore_cron_tab):  # pylint:disable=unused-argument,redefined-outer-name
    """Reconciling persists schedule job state under the instance directory."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as external_repository:
            # Initialize scheduler
            instance.reconcile_scheduler_state(external_repository)

            # Check schedules are saved to disk
            assert "schedules" in os.listdir(temp_dir)
            assert instance.all_stored_job_state(job_type=JobType.SCHEDULE)
def test_remove_schedule_def(
    restore_cron_tab,
):  # pylint:disable=unused-argument,redefined-outer-name
    """Reconciling against a repo with fewer schedules prunes the stored state."""
    with TemporaryDirectory() as temp_dir:
        instance = define_scheduler_instance(temp_dir)
        with get_test_external_repo() as repo:
            instance.reconcile_scheduler_state(repo)
            assert len(instance.all_stored_schedule_state()) == 3

            with get_smaller_external_repo() as smaller_repo:
                instance.reconcile_scheduler_state(smaller_repo)
                assert len(instance.all_stored_schedule_state()) == 2