Example #1
def _process_base_ops(op: BaseOp):
    """Recursively go through the attrs listed in `attrs_with_pipelineparams`
    and sanitize and replace pipeline params with template var string.

    Returns a processed `BaseOp`.

    NOTE: this updates `BaseOp`'s attributes in place (i.e. in each attribute
    listed in `attrs_with_pipelineparams`, every `PipelineParam` is replaced
    with its corresponding template variable string).

    Args:
        op {BaseOp}: an instance of BaseOp or of a class that inherits from it

    Returns:
        BaseOp
    """

    # Map each param's unsanitized pattern (or serialized str pattern) to its
    # input param template variable string.
    # Pick the shortest full name if there are two identical params in the DSL.
    map_to_tmpl_var = {}
    for param in op.inputs:
        key = (param.pattern or str(param))
        if not map_to_tmpl_var.get(key) or len(
                '$(inputs.params.%s)' % param.full_name) < len(
                    map_to_tmpl_var.get(key)):
            map_to_tmpl_var[key] = '$(inputs.params.%s)' % param.full_name

    # process all attr with pipelineParams except inputs and outputs parameters
    for key in op.attrs_with_pipelineparams:
        setattr(op, key, _process_obj(getattr(op, key), map_to_tmpl_var))

    return op
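
A minimal standalone sketch of the shortest-wins mapping above, using made-up
key and full-name strings in place of real PipelineParam objects:

# Hypothetical sketch: two params serialize to the same key, and the loop
# keeps the shorter template variable string.
map_to_tmpl_var = {}
key = '{{pipelineparam:op=;name=msg}}'       # made-up serialized pattern
for full_name in ['for-loop-1-msg', 'msg']:  # made-up full names
    candidate = '$(inputs.params.%s)' % full_name
    if not map_to_tmpl_var.get(key) or len(candidate) < len(map_to_tmpl_var[key]):
        map_to_tmpl_var[key] = candidate
print(map_to_tmpl_var)  # {'{{pipelineparam:op=;name=msg}}': '$(inputs.params.msg)'}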
Example #2
def _process_base_ops(op: BaseOp):
    """Recursively go through the attrs listed in `attrs_with_pipelineparams`
    and sanitize and replace pipeline params with template var string.

    Returns a processed `BaseOp`.

    NOTE: this updates `BaseOp`'s attributes in place (i.e. in each attribute
    listed in `attrs_with_pipelineparams`, every `PipelineParam` is replaced
    with its corresponding template variable string).

    Args:
        op {BaseOp}: an instance of BaseOp or of a class that inherits from it

    Returns:
        BaseOp
    """

    # Map each param's unsanitized pattern (or serialized str pattern) to its
    # input param template variable string.
    map_to_tmpl_var = {
        (param.pattern or str(param)): '$(inputs.params.%s)' %
        param.full_name.replace('_', '-')  # Tekton change
        for param in op.inputs
    }

    # process all attr with pipelineParams except inputs and outputs parameters
    for key in op.attrs_with_pipelineparams:
        setattr(op, key, _process_obj(getattr(op, key), map_to_tmpl_var))

    return op
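
The only difference from Example #1 is the "# Tekton change": the param's full
name is hyphenated before being embedded in the template variable. A hedged
illustration with a plain string standing in for PipelineParam.full_name:

full_name = 'loop_item_param'  # hypothetical full name with underscores
tmpl_var = '$(inputs.params.%s)' % full_name.replace('_', '-')
assert tmpl_var == '$(inputs.params.loop-item-param)'

Note that the dict comprehension here also drops Example #1's shortest-name
tie-break: duplicate keys simply keep the last value.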
Example #3
  def _group_to_dag_template(self, group, inputs, outputs, dependencies):
    """Generate template given an OpsGroup.

    inputs, outputs, dependencies are all helper dicts.
    """
    template = {'name': group.name}
    if group.parallelism is not None:
      template['parallelism'] = group.parallelism

    # Generate inputs section.
    if inputs.get(group.name, None):
      template_inputs = [{'name': x[0]} for x in inputs[group.name]]
      template_inputs.sort(key=lambda x: x['name'])
      template['inputs'] = {
        'parameters': template_inputs
      }

    # Generate outputs section.
    if outputs.get(group.name, None):
      template_outputs = []
      for param_name, dependent_name in outputs[group.name]:
        template_outputs.append({
          'name': param_name,
          'valueFrom': {
            'parameter': '{{tasks.%s.outputs.parameters.%s}}' % (dependent_name, param_name)
          }
        })
      template_outputs.sort(key=lambda x: x['name'])
      template['outputs'] = {'parameters': template_outputs}

    # Generate tasks section.
    tasks = []
    sub_groups = group.groups + group.ops
    for sub_group in sub_groups:
      is_recursive_subgroup = (isinstance(sub_group, OpsGroup) and sub_group.recursive_ref)
      # Special handling for recursive subgroup: use the existing opsgroup name
      if is_recursive_subgroup:
        task = {
            'name': sub_group.recursive_ref.name,
            'template': sub_group.recursive_ref.name,
        }
      else:
        task = {
          'name': sub_group.name,
          'template': sub_group.name,
        }
      if isinstance(sub_group, dsl.OpsGroup) and sub_group.type == 'condition':
        subgroup_inputs = inputs.get(sub_group.name, [])
        condition = sub_group.condition
        operand1_value = self._resolve_value_or_reference(condition.operand1, subgroup_inputs)
        operand2_value = self._resolve_value_or_reference(condition.operand2, subgroup_inputs)
        if condition.operator in ['==', '!=']:
          operand1_value = '"' + operand1_value + '"'
          operand2_value = '"' + operand2_value + '"'
        task['when'] = '{} {} {}'.format(operand1_value, condition.operator, operand2_value)

      # Generate dependencies section for this task.
      if dependencies.get(sub_group.name, None):
        group_dependencies = list(dependencies[sub_group.name])
        group_dependencies.sort()
        task['dependencies'] = group_dependencies

      # Generate arguments section for this task.
      if inputs.get(sub_group.name, None):
        task['arguments'] = {'parameters': self.get_arguments_for_sub_group(sub_group, is_recursive_subgroup, inputs)}

      # additional task modifications for withItems and withParam
      if isinstance(sub_group, dsl.ParallelFor):
        if sub_group.items_is_pipeline_param:
          # these loop args are a 'withParam' rather than 'withItems'.
          # i.e., rather than a static list, they are either the output of another task or were input
          # as global pipeline parameters

          pipeline_param = sub_group.loop_args.items_or_pipeline_param
          withparam_value = self._resolve_task_pipeline_param(pipeline_param, group.type)
          if pipeline_param.op_name:
            # these loop args are the output of another task
            if 'dependencies' not in task or task['dependencies'] is None:
              task['dependencies'] = []
            if sanitize_k8s_name(
                pipeline_param.op_name) not in task['dependencies'] and group.type != 'subgraph':
              task['dependencies'].append(
                  sanitize_k8s_name(pipeline_param.op_name))

          task['withParam'] = withparam_value
        else:
          # Need to sanitize the dict keys for consistency.
          loop_tasks = sub_group.loop_args.to_list_for_task_yaml()
          nested_pipeline_params = extract_pipelineparams_from_any(loop_tasks)

          # Set dependencies in case of nested pipeline_params
          map_to_tmpl_var = {str(p): self._resolve_task_pipeline_param(p, group.type) for p in nested_pipeline_params}
          for pipeline_param in nested_pipeline_params:
            if pipeline_param.op_name:
              # these pipeline_param are the output of another task
              if 'dependencies' not in task or task['dependencies'] is None:
                task['dependencies'] = []
              if sanitize_k8s_name(
                  pipeline_param.op_name) not in task['dependencies']:
                task['dependencies'].append(
                    sanitize_k8s_name(pipeline_param.op_name))

          sanitized_tasks = []
          if loop_tasks and isinstance(loop_tasks[0], dict):
            for argument_set in loop_tasks:
              c_dict = {}
              for k, v in argument_set.items():
                c_dict[sanitize_k8s_name(k, True)] = v
              sanitized_tasks.append(c_dict)
          else:
            sanitized_tasks = loop_tasks
          # Replace pipeline param if map_to_tmpl_var not empty
          task['withItems'] = _process_obj(sanitized_tasks, map_to_tmpl_var) if map_to_tmpl_var else sanitized_tasks

        # Sort dependencies to get deterministic yaml and thus stable tests
        if task.get('dependencies'):
          task['dependencies'].sort()

      tasks.append(task)
    tasks.sort(key=lambda x: x['name'])
    template['dag'] = {'tasks': tasks}
    return template
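
For orientation, a hedged sketch (made-up names, not real compiler output) of
the dict shape this method assembles for a group with one input, one output,
and a single task that depends on another:

template = {
    'name': 'my-group',
    'inputs': {'parameters': [{'name': 'msg'}]},
    'outputs': {'parameters': [{
        'name': 'result',
        'valueFrom': {'parameter': '{{tasks.step-1.outputs.parameters.result}}'},
    }]},
    'dag': {'tasks': [{
        'name': 'step-1',
        'template': 'step-1',
        'dependencies': ['step-0'],
    }]},
}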