Example #1
    def test_start_pipeline(self, openshift, run_name_in_input, input_data):

        new_input_data = deepcopy(input_data)

        if new_input_data:
            new_input_data['metadata']['name'] = run_name_in_input

        p_run = PipelineRun(os=openshift,
                            pipeline_run_name=PIPELINE_RUN_NAME,
                            pipeline_run_data=new_input_data)

        # mock the Tekton pipelineruns endpoint that start_pipeline_run() POSTs to
        responses.add(
            responses.POST,
            f'https://openshift.testing/apis/tekton.dev/v1beta1/namespaces/{TEST_OCP_NAMESPACE}/pipelineruns',  # noqa E501
            json={},
        )

        if new_input_data:
            if run_name_in_input == PIPELINE_RUN_NAME:
                p_run.start_pipeline_run()
                assert len(responses.calls) == 1
                req_body = json.loads(responses.calls[0].request.body)
                assert req_body['metadata']['name'] == run_name_in_input
            else:
                msg = f"Pipeline run name provided '{PIPELINE_RUN_NAME}' is different " \
                      f"than in input data '{run_name_in_input}'"
                with pytest.raises(OsbsException, match=msg):
                    p_run.start_pipeline_run()
                assert len(responses.calls) == 0
        else:
            match_exception = "No input data provided for pipeline run to start"
            with pytest.raises(OsbsException, match=match_exception):
                p_run.start_pipeline_run()
            assert len(responses.calls) == 0
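
The test above relies on the responses library to intercept the HTTP call made by start_pipeline_run(). The following minimal, self-contained sketch shows the same mocking pattern in isolation; the namespace and run name are hypothetical placeholders, not values taken from the original test module.

    import json

    import requests
    import responses

    # hypothetical namespace; the real test builds the URL from TEST_OCP_NAMESPACE
    TEKTON_URL = ('https://openshift.testing/apis/tekton.dev/v1beta1/'
                  'namespaces/test-namespace/pipelineruns')

    @responses.activate
    def demo_post_is_recorded():
        # register a fake endpoint, mirroring the responses.add() call in the test
        responses.add(responses.POST, TEKTON_URL, json={})

        # the POST is intercepted by responses instead of going over the network
        requests.post(TEKTON_URL, json={'metadata': {'name': 'test-pipeline-run'}})

        # every intercepted call is recorded, which is what the assertions on
        # len(responses.calls) in the test rely on
        assert len(responses.calls) == 1
        body = json.loads(responses.calls[0].request.body)
        assert body['metadata']['name'] == 'test-pipeline-run'

    demo_post_is_recorded()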
Example #2
    def create_source_container_pipeline_run(self,
                                             component=None,
                                             koji_task_id=None,
                                             target=None,
                                             **kwargs):
        """
        Take input args, create source pipeline run

        :return: instance of PiplelineRun
        """
        error_messages = []
        # most likely can be removed; the source build should get the component
        # name from the binary build (OSBS2 TBD)
        if not component:
            error_messages.append(
                "required argument 'component' can't be empty")
        if error_messages:
            raise OsbsValidationException(", ".join(error_messages))

        pipeline_run_name, pipeline_run_data = \
            self._get_source_container_pipeline_data()

        build_json_store = self.os_conf.get_build_json_store()
        user_params = SourceContainerUserParams.make_params(
            build_json_dir=build_json_store,
            build_conf=self.os_conf,
            component=component,
            koji_target=target,
            koji_task_id=koji_task_id,
            pipeline_run_name=pipeline_run_name,
            **kwargs)

        self._set_source_container_pipeline_data(pipeline_run_name,
                                                 pipeline_run_data,
                                                 user_params)

        logger.info("creating source container image pipeline run: %s",
                    pipeline_run_name)

        pipeline_run = PipelineRun(self.os, pipeline_run_name,
                                   pipeline_run_data)

        try:
            logger.info("pipeline run created: %s",
                        pipeline_run.start_pipeline_run())
        except OsbsResponseException:
            logger.error("failed to create pipeline run %s", pipeline_run_name)
            raise

        return pipeline_run
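
A hypothetical call site for the method above might look as follows; the configuration path, component name, koji task id, and target are placeholders, and the single-argument OSBS constructor is an assumption about this version of osbs-client.

    # hypothetical usage sketch; all argument values are placeholders
    from osbs.api import OSBS
    from osbs.conf import Configuration

    os_conf = Configuration(conf_file='/etc/osbs.conf', conf_section='default')
    osbs = OSBS(os_conf)  # single-argument constructor assumed here

    pipeline_run = osbs.create_source_container_pipeline_run(
        component='example-component',  # an empty value raises OsbsValidationException
        koji_task_id=123456,
        target='example-target',
        # any further keyword arguments are forwarded to
        # SourceContainerUserParams.make_params()
    )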
Example #3
    def test_start_pipeline(self, openshift,
                            expected_request_body_pipeline_run,
                            run_name_in_input, input_data, labels):

        expected_request_body = deepcopy(expected_request_body_pipeline_run)
        new_input_data = deepcopy(input_data)

        if new_input_data:
            new_input_data['metadata']['name'] = run_name_in_input
            if labels:
                new_input_data['metadata']['labels'] = labels
                expected_request_body['metadata']['labels'] = labels

        p_run = PipelineRun(os=openshift,
                            pipeline_run_name=PIPELINE_RUN_NAME,
                            pipeline_run_data=new_input_data)

        # mock the Tekton pipelineruns endpoint and require the exact expected JSON body
        responses.add(
            responses.POST,
            f'https://openshift.testing/apis/tekton.dev/v1beta1/namespaces/{TEST_OCP_NAMESPACE}/pipelineruns',  # noqa E501
            match=[responses.json_params_matcher(expected_request_body)],
            json={},
        )

        if new_input_data:
            if run_name_in_input == PIPELINE_RUN_NAME:
                p_run.start_pipeline_run()
                assert len(responses.calls) == 1
            else:
                msg = f"Pipeline run name provided '{PIPELINE_RUN_NAME}' is different " \
                      f"than in input data '{run_name_in_input}'"
                with pytest.raises(OsbsException, match=msg):
                    p_run.start_pipeline_run()
                assert len(responses.calls) == 0
        else:
            match_exception = "No input data provided for pipeline run to start"
            with pytest.raises(OsbsException, match=match_exception):
                p_run.start_pipeline_run()
            assert len(responses.calls) == 0
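
The only difference from Example #1 is the match argument: the mocked endpoint only answers requests whose JSON body equals expected_request_body, so a successful start_pipeline_run() also verifies the request payload. In newer releases of responses the matcher lives in responses.matchers rather than on the top-level module. A small self-contained sketch of that behaviour, using a hypothetical URL and payload:

    import requests
    import responses
    from responses import matchers

    @responses.activate
    def demo_body_matching():
        expected_body = {'metadata': {'name': 'test-pipeline-run'}}  # hypothetical payload
        responses.add(
            responses.POST,
            'https://openshift.testing/example',  # hypothetical URL
            match=[matchers.json_params_matcher(expected_body)],
            json={},
        )

        # a request whose JSON body matches exactly is answered and recorded
        requests.post('https://openshift.testing/example', json=expected_body)
        assert len(responses.calls) == 1

        # a request with a different body matches nothing, so responses
        # refuses the connection
        try:
            requests.post('https://openshift.testing/example',
                          json={'metadata': {'name': 'other'}})
        except requests.exceptions.ConnectionError:
            pass

    demo_body_matching()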
Example #4
    def create_binary_container_pipeline_run(
            self,
            git_uri=_REQUIRED_PARAM,
            git_ref=_REQUIRED_PARAM,
            git_branch=_REQUIRED_PARAM,
            component=None,
            flatpak=None,
            git_commit_depth=None,
            isolated=None,
            koji_task_id=None,
            target=None,
            operator_csv_modifications_url=None,
            **kwargs):

        required_params = {
            "git_uri": git_uri,
            "git_ref": git_ref,
            "git_branch": git_branch
        }
        missing_params = []
        for param_name, param_arg in required_params.items():
            # catch both an omitted argument (sentinel) and an explicitly empty value
            if param_arg is _REQUIRED_PARAM or not param_arg:
                missing_params.append(param_name)
        if missing_params:
            raise OsbsException('required parameter {} missing'.format(
                ", ".join(missing_params)))

        if operator_csv_modifications_url and not isolated:
            raise OsbsException(
                'Only isolated build can update operator CSV metadata')

        repo_info = utils.get_repo_info(git_uri,
                                        git_ref,
                                        git_branch=git_branch,
                                        depth=git_commit_depth)

        self._checks_for_flatpak(flatpak, repo_info)

        req_labels = self._check_labels(repo_info)

        user_params = self.get_user_params(
            base_image=repo_info.base_image,
            component=component,
            flatpak=flatpak,
            isolated=isolated,
            koji_target=target,
            koji_task_id=koji_task_id,
            req_labels=req_labels,
            repo_info=repo_info,
            operator_csv_modifications_url=operator_csv_modifications_url,
            **kwargs)

        self._checks_for_isolated(user_params)

        pipeline_run_name, pipeline_run_data = self._get_binary_container_pipeline_data(
            user_params)

        user_params.pipeline_run_name = pipeline_run_name

        self._set_binary_container_pipeline_data(pipeline_run_name,
                                                 pipeline_run_data,
                                                 user_params)

        logger.info("creating binary container image pipeline run: %s",
                    pipeline_run_name)

        pipeline_run = PipelineRun(self.os, pipeline_run_name,
                                   pipeline_run_data)

        try:
            logger.info("pipeline run created: %s",
                        pipeline_run.start_pipeline_run())
        except OsbsResponseException:
            logger.error("failed to create pipeline run %s", pipeline_run_name)
            raise

        return pipeline_run
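
As with the source container variant, a hypothetical call site might look like this; the repository, ref, branch, and target values are placeholders, and the single-argument OSBS constructor is again an assumption.

    # hypothetical usage sketch; all argument values are placeholders
    from osbs.api import OSBS
    from osbs.conf import Configuration

    os_conf = Configuration(conf_file='/etc/osbs.conf', conf_section='default')
    osbs = OSBS(os_conf)  # single-argument constructor assumed here

    pipeline_run = osbs.create_binary_container_pipeline_run(
        git_uri='https://example.com/containers/example.git',
        git_ref='0123456789abcdef0123456789abcdef01234567',
        git_branch='main',
        koji_task_id=123456,
        target='example-target',
        # operator_csv_modifications_url is only accepted together with
        # isolated=True (see the check at the top of the method)
    )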