Example 1
    def _verify_compiled_workflow(self, golden_yaml_file, compiled_workflow):
        """
    Tests if the compiled workflow matches the golden yaml.
    """
        if GENERATE_GOLDEN_YAML:
            # TODO: generate the pipelineloop CRD files akin to ...
            #   for f in testdata/*_pipelineloop_cr*.yaml; do \
            #     echo ${f/_pipelineloop_cr*.yaml/.py}; done | sort -u | while read f; do \
            #     echo $f; dsl-compile-tekton --py $f --output ${f/.py/.yaml}; \
            #   done
            with open(golden_yaml_file, 'w') as f:
                f.write(LICENSE_HEADER)
                f.write(dump_yaml(compiled_workflow))
        else:
            with open(golden_yaml_file, 'r') as f:
                golden = yaml.safe_load(f)
            self.maxDiff = None

            # sort dicts and lists; dict insertion order was not guaranteed
            # before Python 3.6
            if sys.version_info < (3, 6, 0):

                def sort_items(obj):
                    from collections import OrderedDict
                    if isinstance(obj, dict):
                        # build the OrderedDict from sorted (key, value) pairs;
                        # passing a dict comprehension would lose the ordering
                        # again on Python < 3.6
                        return OrderedDict(
                            (k, sort_items(v)) for k, v in sorted(obj.items()))
                    elif isinstance(obj, list):
                        return sorted([sort_items(o) for o in obj], key=str)
                    else:
                        return obj

                golden = sort_items(golden)
                compiled_workflow = sort_items(compiled_workflow)

            self.assertEqual(
                golden,
                compiled_workflow,
                msg="\n===[ " + os.path.basename(golden_yaml_file) +
                    " ]===\n" + json.dumps(compiled_workflow, indent=2))
Example 2
    def _write_workflow(workflow: Dict[Text, Any], package_path: Optional[Text] = None):
        """Dump pipeline workflow into yaml spec and write out in the format specified by the user.

    Args:
      workflow: Workflow spec of the pipline, dict.
      package_path: file path to be written. If not specified, a yaml_text string
        will be returned.
    """
        yaml_text = dump_yaml(workflow)

        # Use regex to replace all Argo variables with their Tekton
        # equivalents. For variables that are unique to Argo, raise an error
        # to alert users about the unsupported variables. The full list of
        # Argo variables is documented here:
        # https://github.com/argoproj/argo/blob/master/docs/variables.md
        # Since Argo variables can appear anywhere in the YAML, dump the whole
        # document to text and rewrite it with regular expressions.
        tekton_var_regex_rules = [{
            'argo_rule': r'{{inputs.parameters.([^ \t\n.:,;{}]+)}}',
            'tekton_rule': r'$(inputs.params.\g<1>)'
        }, {
            'argo_rule': r'{{outputs.parameters.([^ \t\n.:,;{}]+).path}}',
            'tekton_rule': r'$(results.\g<1>.path)'
        }, {
            'argo_rule': r'{{workflow.uid}}',
            'tekton_rule': r'$(context.pipelineRun.uid)'
        }, {
            'argo_rule': r'{{workflow.name}}',
            'tekton_rule': r'$(context.pipelineRun.name)'
        }, {
            'argo_rule': r'{{workflow.namespace}}',
            'tekton_rule': r'$(context.pipelineRun.namespace)'
        }, {
            'argo_rule': r'{{workflow.parameters.([^ \t\n.:,;{}]+)}}',
            'tekton_rule': r'$(params.\g<1>)'
        }]
        for regex_rule in tekton_var_regex_rules:
            yaml_text = re.sub(regex_rule['argo_rule'],
                               regex_rule['tekton_rule'], yaml_text)

        unsupported_vars = re.findall(r"{{[^ \t\n.:,;{}]+\.[^ \t\n:,;{}]+}}",
                                      yaml_text)
        if unsupported_vars:
            raise ValueError(
                'These Argo variables are not supported in Tekton Pipeline: %s'
                % ", ".join(str(v) for v in set(unsupported_vars)))

        if '{{pipelineparam' in yaml_text:
            raise RuntimeError(
                'Internal compiler error: Found unresolved PipelineParam. '
                'Please create a new issue at https://github.com/kubeflow/kfp-tekton/issues '
                'attaching the pipeline DSL code and the pipeline YAML.')

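        # Re-parse the rewritten YAML and resolve Tekton-specific pipeline
        # variables before the final dump.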
        yaml_text = dump_yaml(
            _handle_tekton_pipeline_variables(
                yaml.load(yaml_text, Loader=yaml.FullLoader)))

        if package_path is None:
            return yaml_text

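        # Write the YAML out in the file or archive format implied by the
        # package_path extension.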
        if package_path.endswith('.tar.gz') or package_path.endswith('.tgz'):
            from contextlib import closing
            from io import BytesIO
            with tarfile.open(package_path, "w:gz") as tar:
                with closing(BytesIO(yaml_text.encode())) as yaml_file:
                    tarinfo = tarfile.TarInfo('pipeline.yaml')
                    tarinfo.size = len(yaml_file.getvalue())
                    tar.addfile(tarinfo, fileobj=yaml_file)
        elif package_path.endswith('.zip'):
            with zipfile.ZipFile(package_path, "w") as zip_file:
                zipinfo = zipfile.ZipInfo('pipeline.yaml')
                zipinfo.compress_type = zipfile.ZIP_DEFLATED
                zip_file.writestr(zipinfo, yaml_text)
        elif package_path.endswith('.yaml') or package_path.endswith('.yml'):
            with open(package_path, 'w') as yaml_file:
                yaml_file.write(yaml_text)
        else:
            raise ValueError(
                'The output path %s should end with one of the following '
                'extensions: [.tar.gz, .tgz, .zip, .yaml, .yml]' % package_path)
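
As a quick, self-contained illustration of the variable translation above, the snippet below applies two of the rewrite rules from _write_workflow to a small YAML fragment and then runs the same unsupported-variable check. The sample YAML and the rule subset are chosen for the demo only.

    import re

    # Two of the rewrite rules from _write_workflow, applied to a tiny sample.
    rules = [
        {'argo_rule': r'{{workflow.name}}',
         'tekton_rule': r'$(context.pipelineRun.name)'},
        {'argo_rule': r'{{inputs.parameters.([^ \t\n.:,;{}]+)}}',
         'tekton_rule': r'$(inputs.params.\g<1>)'},
    ]

    yaml_text = ('metadata:\n'
                 '  name: "{{workflow.name}}"\n'
                 'args: ["{{inputs.parameters.epochs}}"]\n')

    for rule in rules:
        yaml_text = re.sub(rule['argo_rule'], rule['tekton_rule'], yaml_text)

    print(yaml_text)
    # metadata:
    #   name: "$(context.pipelineRun.name)"
    # args: ["$(inputs.params.epochs)"]

    # Any {{...}} variable that survived the rewrite is unsupported in Tekton.
    unsupported = re.findall(r"{{[^ \t\n.:,;{}]+\.[^ \t\n:,;{}]+}}", yaml_text)
    assert not unsupported, unsupported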