Example #1
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Runs the user acceptance tests (UAT) via maven against the first
        deployed host URL (or the configured target host URL) and gathers
        any generated test report as evidence.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # NOTE:
        #   at some point may need to do smarter logic if a deployable has more than one deployed
        #   host URL to do UAT against all of them, but for now, use first one as target of UAT
        deployed_host_urls = self.get_value('deployed-host-urls')
        if isinstance(deployed_host_urls, list) and deployed_host_urls:
            # non-empty list: target the first URL
            # (an empty list previously raised IndexError; it now falls
            #  through to the configured target-host-url)
            target_host_url = deployed_host_urls[0]
            if len(deployed_host_urls) > 1:
                step_result.message = \
                    f"Given more than one deployed host URL ({deployed_host_urls})," \
                    f" targeting first one ({target_host_url}) for user acceptance test (UAT)."
                print(step_result.message)
        elif deployed_host_urls:
            # a single URL given as a plain string
            target_host_url = deployed_host_urls
        else:
            # no deployed host URLs; fall back to explicitly configured target
            target_host_url = self.get_value('target-host-url')

        # run the tests
        print("Run user acceptance tests (UAT)")
        mvn_output_file_path = self.write_working_file('mvn_output.txt')
        try:
            # execute maven step (params come from config)
            self._run_maven_step(
                mvn_output_file_path=mvn_output_file_path,
                step_implementer_additional_arguments=[
                    f'-D{self.get_value("target-host-url-maven-argument-name")}={target_host_url}'
                ])
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = "Error running maven. " \
                f"More details maybe found in report artifacts: {error}"
        finally:
            # always attach the maven output, even on failure
            step_result.add_artifact(
                description="Standard out and standard error from maven.",
                name='maven-output',
                value=mvn_output_file_path)

        # get test report dir
        test_report_dirs = self.__get_test_report_dirs()
        if test_report_dirs:
            step_result.add_artifact(
                description="Test report generated when running unit tests.",
                name='test-report',
                value=test_report_dirs)

            # gather test report evidence
            self._gather_evidence_from_test_report_directory_testsuite_elements(
                step_result=step_result, test_report_dirs=test_report_dirs)

        # return result
        return step_result
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        tox_output_file_path = self.write_working_file('tox_test_output.txt')
        try:
            self._run_tox_step(tox_output_file_path=tox_output_file_path)
        except StepRunnerException as error:
            # flag the failure; the output artifact is still attached below
            step_result.message = "Unit test failures. See 'tox-output'" \
                f" report artifacts for details: {error}"
            step_result.success = False
        finally:
            step_result.add_artifact(
                name='tox-output',
                value=tox_output_file_path,
                description="Standard out and standard error from 'tox test'.")

        return step_result
Example #3
0
    def test_from_step_implementer_with_env(self):
        """StepResult.from_step_implementer carries over the implementer's environment."""
        implementer_name = 'tests.helpers.sample_step_implementers.FooStepImplementer'
        config = Config({
            'step-runner-config': {
                'foo': {
                    'implementer': implementer_name,
                    'config': {}
                }
            }
        })
        sub_step = config.get_step_config('foo').get_sub_step(implementer_name)

        step = FooStepImplementer(
            workflow_result=WorkflowResult(),
            parent_work_dir_path=None,
            config=sub_step,
            environment='blarg')

        actual_step_result = StepResult.from_step_implementer(step)
        expected_step_result = StepResult(
            step_name='foo',
            sub_step_name=implementer_name,
            sub_step_implementer_name=implementer_name,
            environment='blarg')

        self.assertEqual(actual_step_result, expected_step_result)
Example #4
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        npm_output_file_path = self.write_working_file('npm_package_output.txt')
        try:
            self._run_npm_step(npm_output_file_path=npm_output_file_path)
        except StepRunnerException as error:
            # flag the failure; the output artifact is still attached below
            step_result.message = "Error running npm install. See 'npm-output'" \
                f" report artifacts for details: {error}"
            step_result.success = False
        finally:
            step_result.add_artifact(
                name='npm-output',
                value=npm_output_file_path,
                description="Standard out and standard error from 'npm install'.")

        return step_result
Example #5
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Reads the configured npm package file (package.json) and publishes
        its "version" value as the 'app-version' artifact.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        package_file = self.get_value('package-file')

        with open(package_file, encoding='utf-8') as package_file_object:
            package_file_data = json.load(package_file_object)

        # fail fast if the package file does not declare a version
        if 'version' not in package_file_data:
            step_result.success = False
            step_result.message = f'Given npm package file ({package_file})' \
                ' does not contain a "version" key.'
            return step_result

        step_result.add_artifact(name='app-version',
                                 value=package_file_data['version'])

        return step_result
Example #6
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Optionally auto-increments the maven project version, then reads the
        project version from the given pom file and publishes it as the
        'app-version' artifact.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        # create the result BEFORE the try block so the except handler can
        # always reference it (previously a StepRunnerException raised during
        # creation would have caused a NameError in the handler)
        step_result = StepResult.from_step_implementer(self)

        try:
            # auto increment the version
            auto_increment_version_segment = self.get_value('auto-increment-version-segment')
            if auto_increment_version_segment:
                print("Update maven package version")
                self.__auto_increment_version(auto_increment_version_segment, step_result)

            # get the version
            project_version = self.__get_project_version(step_result)
            if project_version:
                step_result.add_artifact(
                    name='app-version',
                    value=project_version
                )
            else:
                step_result.success = False
                step_result.message += 'Could not get project version from given pom file' \
                    f' ({self.get_value("pom-file")})'
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = str(error)

        return step_result
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # package the artifacts
        mvn_output_file_path = self.write_working_file('mvn_output.txt')
        try:
            # execute maven step (params come from config)
            self._run_maven_step(mvn_output_file_path=mvn_output_file_path)
        except StepRunnerException as error:
            step_result.message = "Error running maven. " \
                f"More details maybe found in 'maven-output' report artifact: {error}"
            step_result.success = False
        finally:
            # attach the maven output regardless of success
            step_result.add_artifact(
                name='maven-output',
                value=mvn_output_file_path,
                description="Standard out and standard error from maven.")

        return step_result
Example #8
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # determine the tag and record it before attempting to push
        tag = self.__get_tag_value()
        step_result.add_artifact(name='tag', value=tag)

        # tag the commit and push the tags to the remote
        try:
            self.git_tag(tag)
            self.git_push_tags()
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = f"Error tagging and pushing tags: {error}"

        return step_result
Example #9
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        results_artifacts_archive = self.__create_archive()

        # fall back to an informational message when nothing was archived
        if results_artifacts_archive:
            results_artifacts_archive_value = results_artifacts_archive
        else:
            results_artifacts_archive_value = 'No result artifact values to archive.'

        step_result.add_artifact(
            name='result-artifacts-archive',
            value=results_artifacts_archive_value,
            description='Archive of all of the step result artifacts marked for archiving.')

        # if a destination URL is configured, also upload the results archive
        results_archive_destination_url = self.get_value('results-archive-destination-url')
        if results_artifacts_archive and results_archive_destination_url:
            org = self.get_value('organization')
            app = self.get_value('application-name')
            service = self.get_value('service-name')
            results_artifacts_archive_name = os.path.basename(results_artifacts_archive)
            results_archive_destination_uri = f"{results_archive_destination_url}/" \
                f"{org}/{app}/{service}/{results_artifacts_archive_name}"
            step_result.add_artifact(
                name='results-archive-uri',
                description='URI of the uploaded results archive.',
                value=results_archive_destination_uri)

            try:
                upload_result = upload_file(
                    file_path=results_artifacts_archive_value,
                    destination_uri=results_archive_destination_uri,
                    username=self.get_value('results-archive-destination-username'),
                    password=self.get_value('results-archive-destination-password'))
                step_result.add_artifact(
                    name='results-archive-upload-results',
                    description='Results of uploading the results archive ' \
                        'to the given destination.',
                    value=upload_result)
            except RuntimeError as error:
                step_result.success = False
                step_result.message = str(error)

        return step_result
Example #10
0
 def _run_step(self):
     """Copy every resolved runtime step config key/value pair into the result.

     Returns
     -------
     StepResult
         Object containing the dictionary results of this step.
     """
     step_result = StepResult.from_step_implementer(self)
     runtime_step_config = self.config.get_copy_of_runtime_step_config(
         self.environment,
         self.step_implementer_config_defaults())
     leaf_values = ConfigValue.convert_leaves_to_values(runtime_step_config)
     for name, value in leaf_values.items():
         step_result.add_artifact(name=name, value=value)
     return step_result
Example #11
0
    def _run_step(self):
        """Copy every resolved runtime step config key/value pair into the result.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)
        runtime_step_config = self.config.get_copy_of_runtime_step_config(
            self.environment,
            self.step_implementer_config_defaults())

        # publish each resolved config leaf as an artifact on the result
        leaves = ConfigValue.convert_leaves_to_values(runtime_step_config)
        for name, value in leaves.items():
            step_result.add_artifact(name, value)
        return step_result
Example #12
0
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        pom_file = self.get_value('pom-file')
        artifact_extensions = self.get_value('artifact-extensions')
        artifact_parent_dir = self.get_value('artifact-parent-dir')

        # package the artifacts
        mvn_output_file_path = self.write_working_file('mvn_output.txt')
        try:
            # execute maven step (params come from config)
            self._run_maven_step(mvn_output_file_path=mvn_output_file_path)

            # collect packaged artifacts matching the configured extensions
            pom_file_dir_name = os.path.dirname(os.path.abspath(pom_file))
            artifact_dir = os.path.join(pom_file_dir_name, artifact_parent_dir)
            packages = [
                {'path': os.path.join(artifact_dir, filename)}
                for filename in sorted(os.listdir(artifact_dir))
                if any(filename.endswith(ext) for ext in artifact_extensions)
            ]

            step_result.add_artifact(name='packages', value=packages)
        except FileNotFoundError as error:
            step_result.success = False
            step_result.message = f"Error finding artifacts after running maven package: {error}"
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = "Error running 'maven package' to package artifacts. " \
                f"More details maybe found in 'maven-output' report artifact: {error}"
        finally:
            # attach the maven output regardless of success
            step_result.add_artifact(
                name='maven-output',
                value=mvn_output_file_path,
                description="Standard out and standard error from maven.")

        return step_result
Example #13
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # BUILD_NUMBER is read from the environment (set by the CI system);
        # the artifact value is None when the variable is not set
        step_result.add_artifact(
            name='workflow-run-num',
            value=os.environ.get('BUILD_NUMBER'),
            description='Incremental workflow run number')

        return step_result
Example #14
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        work_dir = self.work_dir_path

        # the workflow attestation URI is required
        workflow_attestation_uri = self.get_value('evidence-uri')
        if workflow_attestation_uri is None:
            step_result.success = False
            step_result.message = 'No value found for evidence-uri'
            return step_result

        workflow_attestation_file_path = download_source_to_destination(
            workflow_attestation_uri, work_dir)

        # download the workflow policy from its configured URI
        workflow_policy_uri = self.get_value('workflow-policy-uri')
        workflow_policy_file_path = download_source_to_destination(
            workflow_policy_uri, work_dir)

        audit_results, return_code = self.__audit_attestation(
            workflow_attestation_file_path, workflow_policy_file_path,
            self.DEFAULT_WORKFLOW_POLICY_QUERY)

        if return_code == 1:
            step_result.success = False
            step_result.message = "Attestation error: " + audit_results

            # on failure, re-run with the data query to get a detailed report
            audit_results, return_code = self.__audit_attestation(
                workflow_attestation_file_path, workflow_policy_file_path,
                self.DEFAULT_WORKFLOW_POLICY_DATA_QUERY)
        else:
            step_result.message = "Audit was successful"

        step_result.add_artifact(name='audit-results', value=audit_results)

        return step_result
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Runs the npm publish and delegates failure reporting to the shared
        StepRunnerException handler.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # removed the pointless `finally: pass` — there is no cleanup to do
        try:
            self._execute_npm_publish()
        except StepRunnerException as error:
            self._handle_step_runner_exception(step_result, error)

        return step_result
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # build the commit timestamp gauge in its own registry
        registry = CollectorRegistry()
        commit_timestamp_gauge = Gauge(
            name='commit_timestamp',
            documentation='Pelorus Software Delivery Performance (SDP) Commit timestamp.' \
                'https://github.com/konveyor/pelorus/blob/master/exporters/committime/README.md#commit-time-exporter',
            registry=registry,
            labelnames=['app', 'commit', 'image_sha']
        )

        # label the sample and set its value
        commit_timestamp_gauge.labels(
            app=self.pelorus_app_name,
            commit=self.commit_hash,
            image_sha=self.container_image_digest
        ).set(self.commit_utc_timestamp)

        # push the metric to the Prometheus pushgateway
        try:
            push_to_gateway(
                gateway=self.pelorus_prometheus_pushgateway_url,
                job=self.pelorus_prometheus_job,
                grouping_key={
                    'job': self.pelorus_prometheus_job,
                    'app': self.pelorus_app_name,
                    'commit': self.commit_hash
                },
                registry=registry)
        except Exception as error:  # pylint: disable=broad-except
            step_result.success = False
            step_result.message = "Error pushing Pelorus Commit Timestamp metric to" \
                f" Prometheus Pushgateway ({self.pelorus_prometheus_pushgateway_url}): {error}"

        return step_result
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        argocd_result_set = self.get_value('argocd-deployed-manifest')

        # the deployed manifest file must exist before it can be linted
        if not os.path.exists(argocd_result_set):
            step_result.success = False
            step_result.message = 'File specified in ' \
                                  f'argocd-deployed-manifest {argocd_result_set} not found'
            return step_result

        step_result.add_artifact(
            name='configlint-yml-path',
            value=argocd_result_set)
        return step_result
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # run the tests
        print("Run unit tests")
        mvn_output_file_path = self.write_working_file('mvn_output.txt')
        try:
            # execute maven step (params come from config)
            self._run_maven_step(mvn_output_file_path=mvn_output_file_path)
        except StepRunnerException as error:
            step_result.message = "Error running maven. " \
                f"More details maybe found in report artifacts: {error}"
            step_result.success = False
        finally:
            # always attach the maven output, even when the run failed
            step_result.add_artifact(
                name='maven-output',
                value=mvn_output_file_path,
                description="Standard out and standard error from maven.")

        # attach any generated test reports and gather evidence from them
        test_report_dirs = self.__get_test_report_dirs()
        if test_report_dirs:
            step_result.add_artifact(
                name='test-report',
                value=test_report_dirs,
                description="Test report generated when running unit tests.")

            self._gather_evidence_from_test_report_directory_testsuite_elements(
                step_result=step_result, test_report_dirs=test_report_dirs)

        return step_result
Example #19
0
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Updates the commitizen config with the current version tag, runs a
        `cz bump --dry-run`, and publishes the version it would create as the
        'app-version' artifact.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)
        cz_json_path = self.get_value('cz-json')

        repo_root = self.get_value('repo-root')
        repo = Repo(repo_root)
        os.chdir(repo_root)

        # rewrite the commitizen config in place with the current version tag
        with open(cz_json_path, 'rb+') as cz_json:
            cz_json_contents = json.loads(cz_json.read())
            cz_json_contents['commitizen']['version'] = self._get_version_tag(repo.tags)
            cz_json.seek(0)
            cz_json.truncate(0)
            cz_json.write(json.dumps(cz_json_contents).encode())

        out = io.StringIO()
        sh.cz.bump( # pylint: disable=no-member
            '--dry-run',
            '--yes',
            _out=out,
            _err=sys.stderr,
            _tee='err'
        )
        # dots are escaped so they only match a literal '.'; the previous
        # pattern's bare `.` matched any character (e.g. '1a2b3')
        bump_regex = r'tag to create: (\d+\.\d+\.\d+)'
        version = re.findall(bump_regex, out.getvalue())[0]
        step_result.add_artifact(name='app-version', value=version)

        return step_result
    def _run_step(self):  # pylint: disable=too-many-locals
        """
        Runs the step implemented by this StepImplementer to delete a ArgoCD
        application by name.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        try:
            # record the application name before attempting any ArgoCD calls
            argocd_app_name = self._get_app_name()
            step_result.add_artifact(
                name='argocd-app-name',
                value=argocd_app_name)

            print("Sign into ArgoCD")
            self._argocd_sign_in(
                argocd_api=self.argocd_api,
                username=self.argocd_username,
                password=self.argocd_password,
                insecure=self.argocd_skip_tls)

            print(f"Delete ArgoCD Application ({argocd_app_name})")
            self._argocd_app_delete(
                argocd_app_name,
                argocd_cascade=self.argocd_cascade,
                argocd_propagation_policy=self.argocd_propagation_policy)
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = f"Error deleting environment ({self.environment}):" \
                                  f" {str(error)}"

        return step_result
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Gathers evidence from all previously run steps, writes it to a JSON
        file in the working directory, and, when a destination URL is
        configured, uploads that file to the data store.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # gather all evidence from steps/sub steps
        evidence = self.__gather_evidence()

        # if no evidence is found then return a step result reflecting that status
        if not evidence:
            step_result.add_artifact(
                name='result-generate-evidence',
                value='No evidence to generate.',
                description='Evidence from all previously run steps.')
            step_result.message = "No evidence generated from previously run steps"
            return step_result

        org = self.get_value('organization')
        app = self.get_value('application-name')
        service = self.get_value('service-name')
        version = self.get_value('version')

        result_evidence_name = f"{org}-{app}-{service}-{version}-evidence.json"

        work_dir = self.work_dir_path

        evidence_path = os.path.join(work_dir, result_evidence_name)

        # format into json and write out to file in working directory
        with open(evidence_path, "w", encoding='utf-8') as evidence_file:
            json.dump(evidence, evidence_file, indent=4)

        evidence_destination_url = self.get_value('evidence-destination-url')
        evidence_destination_uri = f"{evidence_destination_url}/" \
                f"{org}/{app}/{service}/{result_evidence_name}"

        # upload file to datastore
        # (previously this also tested the leaked, closed `evidence_file`
        # handle, which is always truthy; only the URL check matters)
        if evidence_destination_url:
            try:
                upload_result = upload_file(
                    file_path=evidence_path,
                    destination_uri=evidence_destination_uri,
                    username=self.get_value('evidence-destination-username'),
                    password=self.get_value('evidence-destination-password'))
                step_result.add_artifact(
                    name='evidence-upload-results',
                    description='Results of uploading the evidence JSON file ' \
                        'to the given destination.',
                    value=upload_result
                )
                step_result.add_artifact(
                    name='evidence-uri',
                    description='URI of the uploaded results archive.',
                    value=evidence_destination_uri)

                step_result.success = True
                step_result.message = 'Evidence successfully packaged ' \
                'in JSON file and uploaded to data store.'

            except RuntimeError as error:
                step_result.success = False
                step_result.message = str(error)
                return step_result
        else:
            step_result.success = True
            step_result.message = 'Evidence successfully packaged ' \
            'in JSON file but was not uploaded to data store (no '\
            'destination URI specified).'

        step_result.add_artifact(name='evidence-path',
                                 value=evidence_path,
                                 description='File path of evidence.')

        return step_result
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Runs a SonarQube static code analysis scan via the `sonar-scanner`
        CLI and records the scan result-set path plus a
        'sonarqube-quality-gate-pass' evidence flag.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        username = None
        password = None
        token = None

        # authentication is optional; a configured token takes precedence
        # over username/password
        if self.has_config_value(AUTHENTICATION_CONFIG, True):
            # Optional: token
            if self.get_value('token'):
                token = self.get_value('token')
            # Optional: username and password
            else:
                if (self.get_value('username')
                        and self.get_value('password')):
                    username = self.get_value('username')
                    password = self.get_value('password')

        application_name = self.get_value('application-name')
        service_name = self.get_value('service-name')
        properties_file = self.get_value('properties')

        # Optional: project-key
        if self.get_value('project-key'):
            project_key = self.get_value('project-key')
        # Default
        else:
            project_key = f'{application_name}:{service_name}'

        # fail fast if the sonar properties file is missing
        if not os.path.exists(properties_file):
            step_result.success = False
            step_result.message = f'Properties file not found: {properties_file}'
            return step_result

        sonarqube_success = False
        try:
            # Hint:  Call sonar-scanner with sh.sonar_scanner
            #    https://amoffat.github.io/sh/sections/faq.html
            working_directory = self.work_dir_path

            sonar_optional_flags = []
            # determine auth flags
            if token:
                sonar_optional_flags += [
                    f'-Dsonar.login={token}'
                ]
            elif username:
                sonar_optional_flags += [
                    f'-Dsonar.login={username}',
                    f'-Dsonar.password={password}',
                ]

            # determine branch flag
            # only provide sonar.branch.name flag if not the "main"/"master"/"release branch" and
            # sonar-analyze-branches is true (since can only due with certain versions of SonarQube)
            # see: https://community.sonarsource.com/t/how-to-change-the-main-branch-in-sonarqube/13669/8
            if self.get_value('sonar-analyze-branches') and not self.__is_release_branch():
                sonar_optional_flags += [
                    f"-Dsonar.branch.name={self.get_value('branch')}",
                ]

            # run scan
            sh.sonar_scanner(  # pylint: disable=no-member
                f'-Dproject.settings={properties_file}',
                f"-Dsonar.host.url={self.get_value('url')}",
                f"-Dsonar.projectVersion={self.get_value('version')}",
                f'-Dsonar.projectKey={project_key}',
                f'-Dsonar.working.directory={working_directory}',
                *sonar_optional_flags,
                _env={
                    "SONAR_SCANNER_OPTS": \
                        f"-Djavax.net.ssl.trustStore={self.get_value('java-truststore')}"
                },
                _out=sys.stdout,
                _err=sys.stderr
            )
            sonarqube_success = True
        except sh.ErrorReturnCode_1 as error: # pylint: disable=no-member
            # Error Code 1: INTERNAL_ERROR
            # See error codes: https://github.com/SonarSource/sonar-scanner-cli/blob/master/src/main/java/org/sonarsource/scanner/cli/Exit.java # pylint: disable=line-too-long
            step_result.success = False
            step_result.message = "Error running static code analysis" \
                f" using sonar-scanner: {error}"
        except sh.ErrorReturnCode_2: # pylint: disable=no-member
            # Error Code 2: USER_ERROR
            # See error codes: https://github.com/SonarSource/sonar-scanner-cli/blob/master/src/main/java/org/sonarsource/scanner/cli/Exit.java # pylint: disable=line-too-long
            step_result.success = False
            step_result.message = "Static code analysis failed." \
                " See 'sonarqube-result-set' result artifact for details."
        except sh.ErrorReturnCode as error: # pylint: disable=no-member
            # Error Code Other: unexpected
            # See error codes: https://github.com/SonarSource/sonar-scanner-cli/blob/master/src/main/java/org/sonarsource/scanner/cli/Exit.java # pylint: disable=line-too-long
            step_result.success = False
            step_result.message = "Unexpected error running static code analysis" \
                f" using sonar-scanner: {error}"

        # path where report-task.txt is expected to be written (under the
        # sonar working directory passed to the scanner above)
        step_result.add_artifact(
            name='sonarqube-result-set',
            value=f'{working_directory}/report-task.txt'
        )

        step_result.add_evidence(
            name='sonarqube-quality-gate-pass',
            value=sonarqube_success
        )

        return step_result
Beispiel #23
0
    def _run_step(self):
        """Sign the built container image with a PGP key and optionally
        upload the resulting signature file to a configured destination.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # PGP private key used to sign the container image
        pgp_key = self.get_value([
            'signer-pgp-private-key', 'container-image-signer-pgp-private-key'
        ])

        # address of the container image that will be signed
        image_address = self._get_deploy_time_container_image_address()

        signatures_dir = self.create_working_dir_sub_dir(
            sub_dir_relative_path='image-signature')
        try:
            # import the PGP key and capture its fingerprint
            key_fingerprint = import_pgp_key(pgp_private_key=pgp_key)
            step_result.add_artifact(
                name='container-image-signature-signer-private-key-fingerprint',
                value=key_fingerprint)

            # login to any provider container registries
            # NOTE 1: can not use auth file, even though we want to, because podman image sign
            #         does not accept authfile.
            #         https://github.com/containers/podman/issues/10866
            # NOTE 2: have to force login to use podman because even though logging in with
            #         any of the tools should work, in testing the podman sign only worked
            #         from within the python virtual environment if the login happened with podman.
            container_registries_login(
                registries=self.get_value('container-registries'),
                containers_config_tls_verify=util.strtobool(
                    self.get_value('src-tls-verify')),
                container_command_short_name='podman')

            # produce the signature file for the image
            sig_file_path = PodmanSign.__sign_image(
                pgp_private_key_fingerprint=key_fingerprint,
                image_signatures_directory=signatures_dir,
                container_image_address=image_address)
            sig_name = os.path.relpath(sig_file_path, signatures_dir)

            step_result.add_artifact(
                name='container-image-signed-address',
                value=image_address,
            )
            step_result.add_artifact(
                name='container-image-signature-file-path',
                value=sig_file_path,
            )
            step_result.add_artifact(
                name='container-image-signature-name',
                value=sig_name)

            # upload the image signature, if a destination was configured
            destination_url = self.get_value(
                'container-image-signature-destination-url')
            if destination_url:
                destination_uri = f"{destination_url}/{sig_name}"
                step_result.add_artifact(
                    name='container-image-signature-uri',
                    description='URI of the uploaded container image signature',
                    value=destination_uri)

                upload_result = upload_file(
                    file_path=sig_file_path,
                    destination_uri=destination_uri,
                    username=self.get_value(
                        'container-image-signature-destination-username'),
                    password=self.get_value(
                        'container-image-signature-destination-password'))
                step_result.add_artifact(
                    name='container-image-signature-upload-results',
                    description='Results of uploading the container image signature'
                                ' to the given destination.',
                    value=upload_result
                )
        except (RuntimeError, StepRunnerException) as error:
            step_result.success = False
            step_result.message = str(error)

        return step_result
Beispiel #24
0
    def _run_step(self):
        """Build a container image from an image specification file using
        ``buildah bud`` and record the build results as step artifacts.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # gather config values; tls-verify may arrive as a string
        image_spec_file = self.get_value('imagespecfile')
        tls_verify = self.get_value('tls-verify')
        if isinstance(tls_verify, str):
            tls_verify = bool(util.strtobool(tls_verify))

        # create the local ("localhost" registry) build tag for the image
        build_address, build_short_address, \
        registry, repository, tag = \
            determine_container_image_address_info(
                contaimer_image_registry='localhost',
                container_image_tag=self.get_value([
                    'container-image-tag',
                    'container-image-version'
                ]),
                organization=self.get_value('organization'),
                application_name=self.get_value('application-name'),
                service_name=self.get_value('service-name')
            )

        try:
            # login to any provider container registries
            # NOTE: important to specify the auth file because depending on the context this is
            #       being run in python process may not have permissions to default location
            auth_file = self.get_value('containers-config-auth-file')
            if not auth_file:
                auth_file = os.path.join(self.work_dir_path, 'container-auth.json')
            container_registries_login(
                registries=self.get_value('container-registries'),
                containers_config_auth_file=auth_file,
                containers_config_tls_verify=tls_verify
            )

            # perform the build
            sh.buildah.bud(  # pylint: disable=no-member
                f"--format={self.get_value('format')}",
                f"--tls-verify={str(tls_verify).lower()}",
                '--layers',
                '-f', image_spec_file,
                '-t', build_address,
                '--authfile', auth_file,
                self.get_value('context'),
                _out=sys.stdout,
                _err=sys.stderr,
                _tee='err'
            )
        except sh.ErrorReturnCode as error:  # pylint: disable=undefined-variable
            step_result.success = False
            step_result.message = 'Issue invoking buildah bud with given image ' \
                f'specification file ({image_spec_file}): {error}'

        # determine the digest of the freshly built image (only if build succeeded)
        container_image_digest = None
        if step_result.success:
            try:
                print("Get container image digest")
                container_image_digest = get_container_image_digest(
                    container_image_address=build_address
                )
            except RuntimeError as error:
                step_result.success = False
                step_result.message = f"Error getting built container image digest: {error}"

        # record the build results as step artifacts
        add_container_build_step_result_artifacts(
            step_result=step_result,
            contaimer_image_registry=registry,
            container_image_repository=repository,
            container_image_tag=tag,
            container_image_digest=container_image_digest,
            container_image_build_address=build_address,
            container_image_build_short_address=build_short_address
        )

        return step_result
Beispiel #25
0
    def _run_step(self):
        """Gather Git branch and commit facts, determine pre-release status,
        and optionally commit and push changes.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # obtain the git repository handle
        try:
            repo = self.git_repo
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = str(error)
            return step_result

        # a bare repository has no working tree to operate on
        if repo.bare:
            step_result.success = False
            step_result.message = 'Given git-repo-root is not a Git repository'
            return step_result

        # a branch name is required to decide pre-release vs release
        if repo.head.is_detached:
            step_result.success = False
            step_result.message = 'Expected a Git branch in given git repo root' \
                ' but has a detached head'
            return step_result

        # record the current branch
        git_branch = repo.active_branch.name
        step_result.add_artifact(name='branch', value=git_branch)

        # a branch is a release branch when it matches any configured regex;
        # with no regexes configured, every branch is treated as pre-release
        release_branch_regexes = self.get_value('release-branch-regexes')
        if release_branch_regexes:
            if not isinstance(release_branch_regexes, list):
                release_branch_regexes = [release_branch_regexes]
            is_pre_release = not any(
                re.match(regex, git_branch) for regex in release_branch_regexes
            )
        else:
            is_pre_release = True

        # record the pre-release determination
        step_result.add_artifact(name='is-pre-release', value=is_pre_release)

        # commit and push changes when configured to do so
        if self.__should_commit_changes_and_push(is_pre_release):
            try:
                self.commit_changes_and_push()
            except StepRunnerException as error:
                step_result.success = False
                step_result.message = f"Error committing and pushing changes: {error}"
                return step_result

        # record the current commit hash; fails on a branch with no commits
        try:
            step_result.add_artifact(
                name='commit-hash',
                value=str(repo.head.reference.commit)
            )
        except ValueError:
            step_result.success = False
            step_result.message = f'Given Git repository root is a' \
                f' git branch ({git_branch}) with no commit history.'
            return step_result

        # record the commit timestamp
        step_result.add_artifact(name='commit-utc-timestamp',
                                 value=self.git_commit_utc_timestamp())

        return step_result
    def _run_step(self):
        """Lint a YAML file with config-lint against the configured rules file.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # the file to lint is required and must exist
        configlint_yml_path = self.get_value('configlint-yml-path')
        if not os.path.exists(configlint_yml_path):
            step_result.success = False
            step_result.message = 'File specified in ' \
                                  f'configlint-yml-path not found: {configlint_yml_path}'
            return step_result

        # the rules file is required and must exist
        rules_file = self.get_value('rules')
        if not os.path.exists(rules_file):
            step_result.success = False
            step_result.message = f'File specified in rules not found: {rules_file}'
            return step_result

        results_file_path = self.write_working_file('configlint_results_file.txt')
        try:
            # run config-lint, mirroring stdout/stderr both to the console
            # and to the results file at the same time
            with open(results_file_path, 'w', encoding='utf-8') as results_file:
                stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stdout, results_file])
                stderr_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stderr, results_file])

                sh.config_lint(  # pylint: disable=no-member
                    "-verbose",
                    "-debug",
                    "-rules",
                    rules_file,
                    configlint_yml_path,
                    _encoding='UTF-8',
                    _out=stdout_callback,
                    _err=stderr_callback,
                    _tee='err')
        except sh.ErrorReturnCode_255:  # pylint: disable=no-member
            # expected failure: config-lint ran but found rule violations;
            # details are captured in the results file
            step_result.success = False
            step_result.message = 'Failed config-lint scan.'
        except sh.ErrorReturnCode:
            # unexpected failure: config-lint itself failed to run;
            # details are captured in the results file
            step_result.success = False
            step_result.message = 'Unexpected Error invoking config-lint.'

        step_result.add_artifact(name='configlint-result-set',
                                 value=results_file_path)
        step_result.add_artifact(name='configlint-yml-path',
                                 value=configlint_yml_path)
        return step_result
    def _run_step(self):  # pylint: disable=too-many-locals
        """Build a container image via the Maven Jkube kubernetes plugin.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # create the local ("localhost" registry) build address for the image
        build_address, build_short_address, \
        registry, repository, tag = \
            determine_container_image_address_info(
                contaimer_image_registry='localhost',
                container_image_tag=self.get_value([
                    'container-image-tag',
                    'container-image-version'
                ]),
                organization=self.get_value('organization'),
                application_name=self.get_value('application-name'),
                service_name=self.get_value('service-name')
            )

        # build the container image with maven
        mvn_output_file_path = self.write_working_file('mvn_k8s_build_output.txt')
        try:
            print("Build container image with Maven Jkube kubernetes plugin")
            # execute maven step (params come from config)
            self._run_maven_step(
                mvn_output_file_path=mvn_output_file_path,
                step_implementer_additional_arguments=[
                    f"-Djkube.generator.name={build_address}"
                ])
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = "Error running 'maven k8s:build' to create container image. " \
                f"More details maybe found in 'maven-jkube-output' report artifact: {error}"
        finally:
            # always attach the maven output, pass or fail
            step_result.add_artifact(
                description="Standard out and standard error from running maven to " \
                    "create container image.",
                name='maven-jkube-output',
                value=mvn_output_file_path
            )

        # determine the digest of the freshly built image (only if build succeeded)
        container_image_digest = None
        if step_result.success:
            try:
                print("Get container image digest")
                container_image_digest = get_container_image_digest(
                    container_image_address=build_address)
            except RuntimeError as error:
                step_result.success = False
                step_result.message = f"Error getting built container image digest: {error}"

        # record the build results as step artifacts
        add_container_build_step_result_artifacts(
            step_result=step_result,
            contaimer_image_registry=registry,
            container_image_repository=repository,
            container_image_tag=tag,
            container_image_digest=container_image_digest,
            container_image_build_address=build_address,
            container_image_build_short_address=build_short_address)

        return step_result
Beispiel #28
0
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Pushes a container image from the pull (source) address to the
        push (destination) registry using `skopeo copy`, then records the
        pushed image address by tag — and, when the push succeeded, by
        digest — as step artifacts.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # get src image config
        pull_registry_type = self.get_value([
            'container-image-pull-registry-type',
            'container-image-registry-type'
        ])
        container_image_pull_address = self.get_value(
            ['container-image-pull-address', 'container-image-build-address'])
        source_tls_verify = self.get_value(
            ['source-tls-verify', 'src-tls-verify'])
        # config values may arrive as strings ("true"/"false"); normalize to bool
        if isinstance(source_tls_verify, str):
            source_tls_verify = bool(util.strtobool(source_tls_verify))

        # create destination config
        push_registry_type = self.get_value([
            'container-image-push-registry-type',
            'container-image-registry-type'
        ])
        container_image_push_registry = self.get_value(
            ['container-image-push-registry', 'destination-url'])
        container_image_push_repository = self.get_value(
            ['container-image-push-repository', 'container-image-repository'])
        container_image_push_tag = self.get_value([
            'container-image-push-tag', 'container-image-tag',
            'container-image-version'
        ])
        dest_tls_verify = self.get_value('dest-tls-verify')
        if isinstance(dest_tls_verify, str):
            dest_tls_verify = bool(util.strtobool(dest_tls_verify))
        # short address omits the registry host; the "by tag" address includes it
        container_image_push_short_address = \
            f"{container_image_push_repository}:{container_image_push_tag}"
        container_image_push_address_by_tag = f"{container_image_push_registry}" \
           f"/{container_image_push_short_address}"

        try:
            # login to any provider container registries
            # NOTE: important to specify the auth file because depending on the context this is
            #       being run in python process may not have permissions to default location
            containers_config_auth_file = self.get_value(
                'containers-config-auth-file')
            if not containers_config_auth_file:
                containers_config_auth_file = os.path.join(
                    self.work_dir_path, 'container-auth.json')
            # NOTE(review): a failure from container_registries_login
            #   (e.g. RuntimeError) is not caught by the sh.ErrorReturnCode
            #   handler below and would propagate to the caller — confirm
            #   whether that is intended.
            container_registries_login(
                registries=self.get_value('container-registries'),
                containers_config_auth_file=containers_config_auth_file,
                containers_config_tls_verify=dest_tls_verify)

            # push image
            sh.skopeo.copy(  # pylint: disable=no-member
                f"--src-tls-verify={str(source_tls_verify).lower()}",
                f"--dest-tls-verify={str(dest_tls_verify).lower()}",
                f"--authfile={containers_config_auth_file}",
                f'{pull_registry_type}{container_image_pull_address}',
                f'{push_registry_type}{container_image_push_address_by_tag}',
                _out=sys.stdout,
                _err=sys.stderr,
                _tee='err')
        except sh.ErrorReturnCode as error:
            step_result.success = False
            step_result.message = f'Error pushing container image ({container_image_pull_address}) ' \
                f' to tag ({container_image_push_address_by_tag}) using skopeo: {error}'

        # add address part artifacts
        # (recorded even on failure so callers can see what was attempted)
        step_result.add_artifact(
            name='container-image-push-registry',
            value=container_image_push_registry,
            description=
            'Container image registry container image was pushed to.')
        step_result.add_artifact(
            name='container-image-push-repository',
            value=container_image_push_repository,
            description=
            'Container image repository container image was pushed to.')
        step_result.add_artifact(
            name='container-image-push-tag',
            value=container_image_push_tag,
            description='Container image tag container image was pushed to.')

        # add address by tag artifacts
        step_result.add_artifact(
            name='container-image-address-by-tag',
            value=container_image_push_address_by_tag,
            description='Pushed container image address by tag.')
        step_result.add_artifact(
            name='container-image-short-address-by-tag',
            value=container_image_push_short_address,
            description=
            'Pushed container image short address (no registry) by tag.')

        # add address by digest artifacts
        # (only when the push succeeded; the digest is read back from the
        # destination registry address)
        if step_result.success:
            try:
                print("Get pushed container image digest")
                container_image_digest = get_container_image_digest(
                    container_image_address=container_image_push_address_by_tag,
                    containers_config_auth_file=containers_config_auth_file)

                container_image_short_address_by_digest = \
                    f"{container_image_push_repository}@{container_image_digest}"
                container_image_address_by_digest = \
                    f"{container_image_push_registry}/{container_image_short_address_by_digest}"

                step_result.add_artifact(
                    name='container-image-push-digest',
                    value=container_image_digest,
                    description=
                    'Container image digest container image was pushed to.')
                step_result.add_artifact(
                    name='container-image-address-by-digest',
                    value=container_image_address_by_digest,
                    description='Pushed container image address by digest.')
                step_result.add_artifact(
                    name='container-image-short-address-by-digest',
                    value=container_image_short_address_by_digest,
                    description=
                    'Pushed container image short address (no registry) by digest.'
                )
            except RuntimeError as error:
                step_result.success = False
                step_result.message = f"Error getting pushed container image digest: {error}"

        return step_result
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Uses s2i (source-to-image) to generate an image specification file
        (a Containerfile) and its accompanying build context from the
        application source and the configured builder image. Warnings hit
        while determining the image scripts url are accumulated in the step
        result message rather than failing the step.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # get values
        builder_image = self.get_value('s2i-builder-image')

        # determine tls flag
        # config values may arrive as strings ("true"/"false"); normalize to bool
        tls_verify = self.get_value('tls-verify')
        if isinstance(tls_verify, str):
            tls_verify = bool(util.strtobool(tls_verify))
        if tls_verify:
            s2i_tls_flags = ['--tlsverify']
        else:
            s2i_tls_flags = []

        # determine the generated imagespec file
        s2i_working_dir = self.create_working_dir_sub_dir('s2i-context')
        imagespecfile = self.write_working_file(
            os.path.join(s2i_working_dir, 'Containerfile.s2i-gen'))

        # determine image scripts url flags
        # use user provided url if given,
        # else try and inspect from builder image
        s2i_image_scripts_url = self.get_value('s2i-image-scripts-url')
        if not s2i_image_scripts_url:
            print(
                'Attempt to inspect builder image for label for image scripts url'
            )

            # attempt to auth with container image registries
            # login to any provider container registries
            # NOTE: important to specify the auth file because depending on the context this is
            #       being run in python process may not have permissions to default location
            containers_config_auth_file = self.get_value(
                'containers-config-auth-file')
            if not containers_config_auth_file:
                containers_config_auth_file = os.path.join(
                    self.work_dir_path, 'container-auth.json')
            try:
                container_registries_login(
                    registries=self.get_value('container-registries'),
                    containers_config_auth_file=containers_config_auth_file,
                    containers_config_tls_verify=tls_verify)
            except RuntimeError as error:
                # best effort: record the warning and continue, the inspect
                # below may still work for public images
                step_result.message += "WARNING: error authenticating with" \
                    " container image registries to be able to pull s2i builder image" \
                    f" to inspect for image scripts url: {error}\n"

            # if not given, attempt to get from builder image labels
            try:
                container_image_details = inspect_container_image(
                    container_image_address=builder_image,
                    containers_config_auth_file=containers_config_auth_file)

                # pull the scripts url out of the builder image's OCI labels
                s2i_image_scripts_url = container_image_details['OCIv1']['config']['Labels']\
                    [SourceToImage.CONTAINER_LABEL_SCRIPTS_URL]
            except RuntimeError as error:
                step_result.message += "WARNING: failed to inspect s2i builder image" \
                    f" ({builder_image}) to dynamically determine image scripts url." \
                    f" S2I default will be used: {error}\n"
            except KeyError as error:
                step_result.message += "WARNING: failed to find s2i scripts url label" \
                    f" ({SourceToImage.CONTAINER_LABEL_SCRIPTS_URL}) on s2i builder image" \
                    f" ({builder_image}) to dynamically determine image scripts url." \
                    f" S2I default will be used: Could not find key ({error}).\n"

        # if determined image scripts url set the flag
        # else s2i will use its default (image:///usr/libexec/s2i)
        if s2i_image_scripts_url:
            s2i_image_scripts_url_flags = [
                '--image-scripts-url', s2i_image_scripts_url
            ]
        else:
            s2i_image_scripts_url_flags = []

        try:
            # perform build
            # NOTE(review): assumes 's2i-additional-arguments' resolves to an
            #   iterable (e.g. defaults to an empty list) — confirm against
            #   this step implementer's default config.
            print(
                'Use s2i to generate imagespecfile and accompanying resources')
            sh.s2i.build(  # pylint: disable=no-member
                self.get_value('context'),
                builder_image,
                '--loglevel',
                self.get_value('s2i-loglevel'),
                *s2i_tls_flags,
                '--as-dockerfile',
                imagespecfile,
                *s2i_image_scripts_url_flags,
                *self.get_value('s2i-additional-arguments'),
                _out=sys.stdout,
                _err=sys.stderr,
                _tee='err')
        except sh.ErrorReturnCode as error:  # pylint: disable=undefined-variable
            step_result.success = False
            step_result.message += f'Issue invoking s2i build: {error}'

        # add artifacts
        step_result.add_artifact(
            name='imagespecfile',
            value=imagespecfile,
            description=
            'File defining the container image to build generated by s2i')
        step_result.add_artifact(
            name='context',
            value=s2i_working_dir,
            description=
            'Context to use when building the imagespecfile generated by S2I.')

        return step_result
Beispiel #30
0
    def _run_step(self):
        """Run the configured npm test script against a deployed host.

        Determines the target host URL — the first entry of
        'deployed-host-urls' when given, otherwise 'target-host-url' —
        optionally exposes it to the tests via a configured environment
        variable, runs the npm test script, and gathers any generated test
        reports as artifacts and evidence.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # NOTE:
        #   at some point may need to do smarter logic if a deployable has more then one deployed
        #   host URL to do UAT against all of them, but for now, use first one as target of UAT
        # (dead "unset" pre-initialization removed: every branch below assigns)
        deployed_host_urls = self.get_value('deployed-host-urls')
        if isinstance(deployed_host_urls, list):
            target_host_url = deployed_host_urls[0]
            if len(deployed_host_urls) > 1:
                step_result.message = \
                    f"Given more than one deployed host URL ({deployed_host_urls})," \
                    f" targeting first one ({target_host_url}) for test."
                print(step_result.message)
        elif deployed_host_urls:
            target_host_url = deployed_host_urls
        else:
            target_host_url = self.get_value('target-host-url')

        # run the tests
        npm_output_file_path = self.write_working_file('npm_output.txt')
        try:
            self.npm_args = ['run', self.get_value('npm-test-script')]

            # expose the target host URL to the tests via an environment
            # variable, if one was configured (single config lookup)
            target_host_env_var_name = self.get_value("target-host-env-var-name")
            additional_envs = None
            if target_host_env_var_name:
                additional_envs = {target_host_env_var_name: target_host_url}

            # execute npm step
            self._run_npm_step(
                npm_output_file_path=npm_output_file_path,
                step_implementer_additional_envs=additional_envs
            )
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = f"Error running npm. More details maybe found in report artifacts: {error}"
        finally:
            # always attach the npm output, pass or fail
            step_result.add_artifact(
                description="Standard out and standard error from npm.",
                name='npm-output',
                value=npm_output_file_path
            )

        test_report_dirs = self.get_value(['test-reports-dir', 'test-reports-dirs'])
        if test_report_dirs:
            step_result.add_artifact(
                description="Test report generated when running tests.",
                name='test-report',
                value=test_report_dirs
            )

            # gather test report evidence
            self._gather_evidence_from_test_report_directory_testsuite_elements(
                step_result=step_result,
                test_report_dirs=test_report_dirs
            )

        # return result
        return step_result