def resolve_schedules(self, graphene_info):
    """Resolve the running schedules that target this pipeline.

    Fetches every schedule stored on the instance for the legacy external
    repository, then keeps only those whose schedule definition points at
    this pipeline, wrapping each in a ``RunningSchedule`` GraphQL type.
    """
    external_repository = graphene_info.context.legacy_external_repository
    schedules = graphene_info.context.instance.all_schedules(external_repository.name)
    # Hoist the invariant pipeline-name lookup out of the comprehension so
    # get_represented_pipeline() is not re-evaluated for every schedule.
    pipeline_name = self.get_represented_pipeline().name
    return [
        graphene_info.schema.type_named('RunningSchedule')(graphene_info, schedule=schedule)
        for schedule in schedules
        if get_dagster_schedule_def(graphene_info, schedule.name).name == pipeline_name
    ]
def __init__(self, graphene_info, schedule):
    """Wrap a stored ``Schedule`` record as a ``RunningSchedule`` GraphQL type.

    Validates the schedule argument, resolves its schedule definition into
    the ``ScheduleDefinition`` GraphQL type, and forwards the stored
    schedule's fields to the parent constructor.
    """
    self._schedule = check.inst_param(schedule, 'schedule', Schedule)
    # Resolve the definition up front so the super() call stays flat.
    schedule_definition = graphene_info.schema.type_named('ScheduleDefinition')(
        get_dagster_schedule_def(graphene_info, schedule.name)
    )
    super(DauphinRunningSchedule, self).__init__(
        schedule_id=schedule.schedule_id,
        schedule_definition=schedule_definition,
        status=schedule.status,
        python_path=schedule.python_path,
        repository_path=schedule.repository_path,
    )
def __init__(self, graphene_info, schedule):
    """Wrap a stored ``Schedule`` record as a ``RunningSchedule`` GraphQL type.

    Validates the schedule argument, looks up both the in-process schedule
    definition and its external-repository counterpart, and forwards the
    stored schedule's fields to the parent constructor.
    """
    self._schedule = check.inst_param(schedule, 'schedule', Schedule)
    repository = graphene_info.context.legacy_external_repository
    external_schedule = repository.get_external_schedule(schedule.name)
    schedule_def = get_dagster_schedule_def(graphene_info, schedule.name)
    super(DauphinRunningSchedule, self).__init__(
        schedule_definition=graphene_info.schema.type_named('ScheduleDefinition')(
            schedule_def=schedule_def,
            external_schedule=external_schedule,
        ),
        status=schedule.status,
        python_path=schedule.python_path,
        repository_path=schedule.repository_path,
    )
def resolve_schedules(self, graphene_info):
    """Resolve the running schedules that target this pipeline.

    Returns an empty list for historical pipeline snapshots (which cannot
    have running schedules); otherwise fetches the instance's schedules for
    this pipeline's repository and keeps only those whose definition points
    at this pipeline, wrapped as ``RunningSchedule`` GraphQL types.
    """
    represented_pipeline = self.get_represented_pipeline()
    if not isinstance(represented_pipeline, ExternalPipeline):
        # This is an historical pipeline snapshot, so there are not any
        # associated running schedules.
        return []
    schedules = graphene_info.context.instance.all_schedules(
        represented_pipeline.handle.repository_name
    )
    # Reuse the already-bound represented_pipeline instead of re-calling
    # self.get_represented_pipeline() inside the comprehension per schedule.
    pipeline_name = represented_pipeline.name
    return [
        graphene_info.schema.type_named('RunningSchedule')(graphene_info, schedule=schedule)
        for schedule in schedules
        if get_dagster_schedule_def(graphene_info, schedule.name).name == pipeline_name
    ]