def start_scheduled_execution(graphene_info, schedule_name):
    """Kick off a pipeline run for the named schedule.

    Looks up the schedule and its definition, consults the definition's
    ``should_execute`` hook, tags the run's execution metadata with the
    reserved ``dagster/schedule_id`` and ``dagster/schedule_name`` tags,
    and delegates to ``start_pipeline_execution``.

    Args:
        graphene_info: GraphQL resolve info; must be a ``ResolveInfo``.
        schedule_name (str): Name of the schedule to execute.

    Returns:
        Either a ``ScheduledExecutionBlocked`` GraphQL type (when
        ``should_execute`` vetoes the run) or the result of
        ``start_pipeline_execution``.
    """
    # Local import to avoid a circular dependency with the schema module.
    from dagster_graphql.schema.roots import create_execution_metadata

    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.str_param(schedule_name, 'schedule_name')

    schedule = get_dagster_schedule(graphene_info, schedule_name)
    schedule_def = get_dagster_schedule_def(graphene_info, schedule_name)

    # Run should_execute and halt if it returns False.
    # Fixed: use truthiness (not `!= True`) and actually format the message —
    # the original left the `{schedule_name}` placeholder unformatted.
    if not schedule_def.should_execute():
        return graphene_info.schema.type_named('ScheduledExecutionBlocked')(
            message=(
                'Schedule {schedule_name} did not run because the should_execute did not return '
                'True'.format(schedule_name=schedule_name)
            )
        )

    # Ensure executionMetadata.tags exists so we can append to it below.
    # NOTE(review): assumes merge_dicts gives schedule_def.execution_params
    # precedence over the defaults dict — confirm against its definition.
    execution_params = merge_dicts(
        {'executionMetadata': {'tags': []}}, schedule_def.execution_params
    )

    # The schedule id/name tags are reserved for the scheduler; refuse to
    # proceed if the schedule definition already set them.
    check.invariant(
        not any(
            tag['key'] == 'dagster/schedule_id'
            for tag in execution_params['executionMetadata']['tags']
        ),
        "Tag dagster/schedule_id tag is already defined in executionMetadata.tags",
    )
    check.invariant(
        not any(
            tag['key'] == 'dagster/schedule_name'
            for tag in execution_params['executionMetadata']['tags']
        ),
        "Tag dagster/schedule_name tag is already defined in executionMetadata.tags",
    )

    execution_params['executionMetadata']['tags'].append(
        {'key': 'dagster/schedule_id', 'value': schedule.schedule_id}
    )
    execution_params['executionMetadata']['tags'].append(
        {'key': 'dagster/schedule_name', 'value': schedule.name}
    )

    selector = execution_params['selector']
    execution_params = ExecutionParams(
        selector=ExecutionSelector(selector['name'], selector.get('solidSubset')),
        environment_dict=execution_params.get('environmentConfigData'),
        mode=execution_params.get('mode'),
        execution_metadata=create_execution_metadata(execution_params.get('executionMetadata')),
        step_keys=execution_params.get('stepKeys'),
        previous_run_id=None,
    )

    return start_pipeline_execution(graphene_info, execution_params)
def start_scheduled_execution(graphene_info, schedule_name):
    """Run a scheduled execution of the pipeline backing ``schedule_name``.

    Builds a ``ScheduleExecutionContext``, consults the schedule definition's
    ``should_execute`` hook, tags the run with the reserved schedule id/name
    tags, then either launches the run (when the instance has a run launcher)
    or starts it in-process.

    Args:
        graphene_info: GraphQL resolve info; must be a ``ResolveInfo``.
        schedule_name (str): Name of the schedule to execute.

    Returns:
        A ``ScheduledExecutionBlocked`` GraphQL type when ``should_execute``
        vetoes the run; otherwise the result of ``launch_pipeline_execution``
        or ``start_pipeline_execution``.
    """
    # Local import to avoid a circular dependency with the schema module.
    from dagster_graphql.schema.roots import create_execution_metadata

    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.str_param(schedule_name, 'schedule_name')

    schedule = get_dagster_schedule(graphene_info, schedule_name)
    schedule_def = get_dagster_schedule_def(graphene_info, schedule_name)
    ctx = ScheduleExecutionContext(graphene_info.context.instance)

    # Bail out early when the schedule's should_execute hook vetoes this run.
    if not schedule_def.should_execute(ctx):
        blocked_type = graphene_info.schema.type_named('ScheduledExecutionBlocked')
        return blocked_type(
            message=(
                'Schedule {schedule_name} did not run because the should_execute did not return'
                ' True'.format(schedule_name=schedule_name)
            )
        )

    run_config = schedule_def.get_environment_dict(ctx)
    run_tags = schedule_def.get_tags(ctx)

    # The schedule id/name tags are reserved; refuse to clobber user values.
    check.invariant('dagster/schedule_id' not in run_tags)
    run_tags['dagster/schedule_id'] = schedule.schedule_id

    check.invariant('dagster/schedule_name' not in run_tags)
    run_tags['dagster/schedule_name'] = schedule_def.name

    tag_entries = [{'key': key, 'value': value} for key, value in run_tags.items()]

    raw_params = merge_dicts(
        schedule_def.execution_params,
        {'executionMetadata': {'tags': tag_entries}},
    )

    pipeline_selector = ExecutionSelector(
        raw_params['selector']['name'],
        raw_params['selector'].get('solidSubset'),
    )

    execution_params = ExecutionParams(
        selector=pipeline_selector,
        environment_dict=run_config,
        mode=raw_params.get('mode'),
        execution_metadata=create_execution_metadata(raw_params.get('executionMetadata')),
        step_keys=raw_params.get('stepKeys'),
        previous_run_id=None,
    )

    # Prefer the configured run launcher when the instance provides one.
    if graphene_info.context.instance.run_launcher:
        return launch_pipeline_execution(graphene_info, execution_params)
    return start_pipeline_execution(graphene_info, execution_params)