Example #1
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        tox_output_file_path = self.write_working_file('tox_test_output.txt')

        try:
            self._run_tox_step(tox_output_file_path=tox_output_file_path)

        except StepRunnerException as error:
            step_result.message = "Unit test failures. See 'tox-output'" \
                f" report artifacts for details: {error}"
            step_result.success = False
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from 'tox test'.",
                name='tox-output',
                value=tox_output_file_path)

        return step_result
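
The try/except/finally shape above recurs in most of the examples that follow: the tool's captured output is attached as a StepResult artifact whether the step succeeds or fails. A minimal sketch of that recurring pattern, using only names that appear in these examples plus a hypothetical `_run_tool` helper and 'tool-output' artifact name:

    def _run_step(self):
        """Sketch only: run a tool, record failure on StepRunnerException, always attach output."""
        step_result = StepResult.from_step_implementer(self)
        output_file_path = self.write_working_file('tool_output.txt')  # hypothetical file name
        try:
            self._run_tool(output_file_path=output_file_path)  # hypothetical helper
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = f"Tool failure. See 'tool-output' report artifact for details: {error}"
        finally:
            # attached even when the tool fails, so the output is always available
            step_result.add_artifact(
                description='Standard out and standard error from the tool.',
                name='tool-output',
                value=output_file_path)
        return step_result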
Example #2
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # package the artifacts
        npm_output_file_path = self.write_working_file('npm_output.txt')
        try:
            # execute npm step (params come from config)
            self._run_npm_step(npm_output_file_path=npm_output_file_path)
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = "Error running npm. " \
                f"More details maybe found in 'npm-output' report artifact: {error}"
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from npm.",
                name='npm-output',
                value=npm_output_file_path)

        return step_result
Example #3
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        npm_output_file_path = self.write_working_file(
            'npm_package_output.txt')
        try:
            self._run_npm_step(npm_output_file_path=npm_output_file_path)

        except StepRunnerException as error:
            step_result.message = "Error running npm install. See 'npm-output'" \
                f" report artifacts for details: {error}"
            step_result.success = False
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from 'npm install'.",
                name='npm-output',
                value=npm_output_file_path)

        return step_result
Example #4
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        release_branch = self.get_value('release-branch')
        app_version = self.get_value('app-version')
        pre_release = self.get_value('pre-release')
        build = self.get_value('build')

        if pre_release == release_branch:
            version = f'{app_version}+{build}'
            image_tag = f'{app_version}'
        else:
            version = f'{app_version}-{pre_release}+{build}'
            image_tag = f'{app_version}-{pre_release}'

        step_result.add_artifact(name='version', value=version)

        step_result.add_artifact(name='container-image-version',
                                 value=image_tag)

        step_result.add_evidence(name='version', value=version)
        step_result.add_evidence(name='container-image-version',
                                 value=image_tag)

        return step_result
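
To make the branching above concrete, here is a small worked example with illustrative input values (not taken from any real configuration):

    # Worked example with hypothetical values.
    app_version, pre_release, build, release_branch = '1.2.3', 'feature_foo', 'abc1234', 'main'
    if pre_release == release_branch:
        version, image_tag = f'{app_version}+{build}', app_version
    else:
        version, image_tag = f'{app_version}-{pre_release}+{build}', f'{app_version}-{pre_release}'
    assert version == '1.2.3-feature_foo+abc1234'
    assert image_tag == '1.2.3-feature_foo'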
Example #5
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        pom_file = self.get_value('pom-file')

        pom_version = None
        try:
            pom_version_element = get_xml_element(pom_file, 'version')
            pom_version = pom_version_element.text
        except ValueError:
            pom_version = None

        if not pom_version:
            step_result.success = False
            step_result.message = f'Given pom file ({pom_file})' + \
                ' does not contain a \"version\" key.'
            return step_result

        step_result.add_artifact(
            name='app-version',
            value=pom_version
        )

        return step_result
Example #6
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        image_version = self.get_value('container-image-version').lower()
        application_name = self.get_value('application-name')
        service_name = self.get_value('service-name')
        organization = self.get_value('organization')
        image_tar_file = self.get_value('image-tar-file')
        destination_url = self.get_value('destination-url')

        image_registry_uri = destination_url
        image_registry_organization = organization
        image_repository = f"{application_name}-{service_name}"
        image_tag = f"{image_registry_uri}/{image_registry_organization}" \
                    f"/{image_repository}:{image_version}"

        try:
            # login to any provider container registries
            # NOTE: important to specify the auth file because, depending on the context this is
            #       being run in, the python process may not have permissions for the default location
            containers_config_auth_file = self.get_value('containers-config-auth-file')
            container_registries_login(
                registries=self.get_value('container-registries'),
                containers_config_auth_file=containers_config_auth_file
            )

            # push image
            sh.skopeo.copy( # pylint: disable=no-member
                f"--src-tls-verify={str(self.get_value('src-tls-verify'))}",
                f"--dest-tls-verify={str(self.get_value('dest-tls-verify'))}",
                f"--authfile={containers_config_auth_file}",
                f'docker-archive:{image_tar_file}',
                f'docker://{image_tag}',
                _out=sys.stdout,
                _err=sys.stderr,
                _tee='err'
            )
        except sh.ErrorReturnCode as error:
            step_result.success = False
            step_result.message = f'Error pushing container image ({image_tar_file})' \
                f' to tag ({image_tag}) using skopeo: {error}'

        step_result.add_artifact(name='container-image-registry-uri', value=image_registry_uri)
        step_result.add_artifact(
            name='container-image-registry-organization',
            value=image_registry_organization
        )
        step_result.add_artifact(name='container-image-repository', value=image_repository)
        step_result.add_artifact(name='container-image-name', value=image_repository)
        step_result.add_artifact(name='container-image-version', value=image_version)
        step_result.add_artifact(name='container-image-tag', value=image_tag)

        return step_result
Example #7
    def test_from_step_implementer_with_env(self):
        config = Config({
            'step-runner-config': {
                'foo': {
                    'implementer':
                    'tests.helpers.sample_step_implementers.FooStepImplementer',
                    'config': {}
                }
            }
        })
        step_config = config.get_step_config('foo')
        sub_step = step_config.get_sub_step(
            'tests.helpers.sample_step_implementers.FooStepImplementer')

        step = FooStepImplementer(workflow_result=WorkflowResult(),
                                  parent_work_dir_path=None,
                                  config=sub_step,
                                  environment='blarg')

        step_result = StepResult.from_step_implementer(step)

        expected_step_result = StepResult(
            step_name='foo',
            sub_step_name=
            'tests.helpers.sample_step_implementers.FooStepImplementer',
            sub_step_implementer_name=
            'tests.helpers.sample_step_implementers.FooStepImplementer',
            environment='blarg')

        self.assertEqual(step_result, expected_step_result)
Example #8
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        package_file = self.get_value('package-file')

        with open(package_file) as package_file_object:
            package_file_data = json.load(package_file_object)

        if not "version" in package_file_data:
            step_result.success = False
            step_result.message = f'Given npm package file ({package_file})' + \
              ' does not contain a \"version\" key.'
            return step_result

        step_result.add_artifact(name='app-version',
                                 value=package_file_data["version"])

        return step_result
Example #9
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        results_artifacts_archive = self.__create_archive()

        if results_artifacts_archive:
            results_artifacts_archive_value = results_artifacts_archive
        else:
            results_artifacts_archive_value = 'No result artifact values to archive.'

        step_result.add_artifact(
            name='result-artifacts-archive',
            value=results_artifacts_archive_value,
            description='Archive of all of the step result artifacts marked for archiving.'
        )

        # if a destination URL is specified, then upload the results archive
        results_archive_destination_url = self.get_value(
            'results-archive-destination-url')
        if results_artifacts_archive and results_archive_destination_url:
            org = self.get_value('organization')
            app = self.get_value('application-name')
            service = self.get_value('service-name')
            results_artifacts_archive_name = os.path.basename(
                results_artifacts_archive)
            results_archive_destination_uri = f"{results_archive_destination_url}/" \
                f"{org}/{app}/{service}/{results_artifacts_archive_name}"
            step_result.add_artifact(
                name='results-archive-uri',
                description='URI of the uploaded results archive.',
                value=results_archive_destination_uri)

            try:
                upload_result = upload_file(
                    file_path=results_artifacts_archive_value,
                    destination_uri=results_archive_destination_uri,
                    username=self.get_value(
                        'results-archive-destination-username'),
                    password=self.get_value(
                        'results-archive-destination-password'))
                step_result.add_artifact(
                    name='results-archive-upload-results',
                    description='Results of uploading the results archive ' \
                        'to the given destination.',
                    value=upload_result
                )
            except RuntimeError as error:
                step_result.success = False
                step_result.message = str(error)

        return step_result
Example #10
    def _run_step(self):
        step_result = StepResult.from_step_implementer(self)
        runtime_step_config = self.config.get_copy_of_runtime_step_config(
            self.environment, self.step_implementer_config_defaults())
        for name, value in ConfigValue.convert_leaves_to_values(
                runtime_step_config).items():
            step_result.add_artifact(name=name, value=value)
        return step_result
Example #11
    def _run_step(self):
        step_result = StepResult.from_step_implementer(self)
        runtime_step_config = self.config.get_copy_of_runtime_step_config(
            self.environment, self.step_implementer_config_defaults())

        # copy the key/value pairs into the artifacts
        for name, value in ConfigValue.convert_leaves_to_values(
                runtime_step_config).items():
            step_result.add_artifact(name=name, value=value)
        return step_result
Example #12
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        repo_root = self.get_value('repo-root')
        build_string_length = self.get_value('build-string-length')

        try:
            repo = Repo(repo_root)
        except InvalidGitRepositoryError:
            step_result.success = False
            step_result.message = f'Given directory ({repo_root}) is not a Git repository'
            return step_result

        if repo.bare:
            step_result.success = False
            step_result.message = f'Given directory ({repo_root}) is a bare Git repository'
            return step_result

        # The SemanticVersion StepImplementer uses the branch name (as stored by 'pre-release'
        # below), so this step requires the git repository to not be in a detached head state. If
        # there are any brilliant ideas for specifying an appropriate pre-release value while in a
        # detached head state, pull requests are welcome!
        if repo.head.is_detached:
            step_result.success = False
            step_result.message = f'Expected a Git branch in given directory ({repo_root})' \
                                  ' but has a detached head'
            return step_result

        git_branch = str(repo.head.reference)
        pre_release_regex = re.compile(r"/", re.IGNORECASE)
        pre_release = re.sub(pre_release_regex, '_', git_branch)
        step_result.add_artifact(name='pre-release', value=pre_release)

        try:
            git_branch_last_commit_hash = str(
                repo.head.reference.commit)[:build_string_length]

            step_result.add_artifact(name='build',
                                     value=git_branch_last_commit_hash)
        except ValueError:
            step_result.success = False
            step_result.message = f'Given directory ({repo_root}) is a git branch ({git_branch})' \
                                  ' with no commit history'
            return step_result

        return step_result
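
Because the 'pre-release' artifact feeds the SemanticVersion step, it is worth seeing what the slash-to-underscore substitution produces. A small standalone illustration (branch names are made up):

    import re

    # hypothetical branch names, for illustration only
    for git_branch in ('main', 'feature/add-login', 'bugfix/JIRA-123/retry'):
        pre_release = re.sub(r'/', '_', git_branch)
        print(git_branch, '->', pre_release)
    # main -> main
    # feature/add-login -> feature_add-login
    # bugfix/JIRA-123/retry -> bugfix_JIRA-123_retry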
Example #13
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        pom_file = self.get_value('pom-file')
        artifact_extensions = self.get_value('artifact-extensions')
        artifact_parent_dir = self.get_value('artifact-parent-dir')

        # package the artifacts
        mvn_output_file_path = self.write_working_file('mvn_output.txt')
        try:
            # execute maven step (params come from config)
            self._run_maven_step(mvn_output_file_path=mvn_output_file_path)

            # find the artifacts
            packages = []
            pom_file_dir_name = os.path.dirname(os.path.abspath(pom_file))
            files_in_artifact_parent_dir = sorted(
                os.listdir(os.path.join(pom_file_dir_name,
                                        artifact_parent_dir)))
            for filename in files_in_artifact_parent_dir:
                if any(filename.endswith(ext) for ext in artifact_extensions):
                    packages += [{
                        'path': os.path.join(
                            pom_file_dir_name, artifact_parent_dir, filename)
                    }]

            step_result.add_artifact(name='packages', value=packages)
        except FileNotFoundError as error:
            step_result.success = False
            step_result.message = f"Error finding artifacts after running maven package: {error}"
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = "Error running 'maven package' to package artifacts. " \
                f"More details maybe found in 'maven-output' report artifact: {error}"
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from maven.",
                name='maven-output',
                value=mvn_output_file_path)

        return step_result
Example #14
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        repo_root = self.get_value('repo-root')
        build_string_length = self.get_value('build-string-length')

        try:
            repo = Repo(repo_root)
        except InvalidGitRepositoryError:
            step_result.success = False
            step_result.message = f'Given directory ({repo_root}) is not a Git repository'
            return step_result

        if repo.bare:
            step_result.success = False
            step_result.message = f'Given directory ({repo_root}) is a bare Git repository'
            return step_result

        if repo.head.is_detached:
            step_result.success = False
            step_result.message = f'Expected a Git branch in given directory ({repo_root})' \
                                  ' but has a detached head'
            return step_result

        git_branch = str(repo.head.reference)
        pre_release_regex = re.compile(r"/", re.IGNORECASE)
        pre_release = re.sub(pre_release_regex, '_', git_branch)
        step_result.add_artifact(name='pre-release', value=pre_release)

        try:
            git_branch_last_commit_hash = str(
                repo.head.reference.commit)[:build_string_length]

            step_result.add_artifact(name='build',
                                     value=git_branch_last_commit_hash)
        except ValueError:
            step_result.success = False
            step_result.message = f'Given directory ({repo_root}) is a git branch ({git_branch})' \
                                  ' with no commit history'
            return step_result

        return step_result
Example #15
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        work_dir = self.work_dir_path

        # workflow attestation URI
        workflow_attestation_uri = self.get_value('evidence-uri')

        if workflow_attestation_uri is None:
            step_result.success = False
            step_result.message = 'No value found for evidence-uri'
            return step_result

        workflow_attestation_file_path = download_source_to_destination(
            workflow_attestation_uri, work_dir)

        workflow_policy_uri = self.get_value('workflow-policy-uri')

        # download the workflow policy from the configured URI
        workflow_policy_file_path = download_source_to_destination(
            workflow_policy_uri, work_dir)

        audit_results, return_code = self.__audit_attestation(
            workflow_attestation_file_path, workflow_policy_file_path,
            self.DEFAULT_WORKFLOW_POLICY_QUERY)

        if return_code == 1:
            step_result.success = False
            step_result.message = "Attestation error: " + audit_results

            detailed_report, return_code = self.__audit_attestation(
                workflow_attestation_file_path, workflow_policy_file_path,
                self.DEFAULT_WORKFLOW_POLICY_DATA_QUERY)
            audit_results = detailed_report

        else:
            step_result.message = "Audit was successful"

        step_result.add_artifact(name='audit-results', value=audit_results)

        return step_result
Example #16
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # run the tests
        print("Run unit tests")
        mvn_output_file_path = self.write_working_file('mvn_output.txt')
        try:
            # execute maven step (params come from config)
            self._run_maven_step(mvn_output_file_path=mvn_output_file_path)
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = "Error running maven. " \
                f"More details maybe found in report artifacts: {error}"
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from maven.",
                name='maven-output',
                value=mvn_output_file_path)

        # get test report dir
        test_report_dir = self.__get_test_report_dir()
        if test_report_dir:
            step_result.add_artifact(
                description="Test report generated when running unit tests.",
                name='test-report',
                value=test_report_dir)

            # gather test report evidence
            self._gather_evidence_from_test_report_directory_testsuite_elements(
                step_result=step_result, test_report_dir=test_report_dir)

        # return result
        return step_result
Example #17
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)
        rekor_server = self.get_value('rekor-server-url')

        work_dir = self.work_dir_path
        artifact_to_sign_uri = self.get_value(
            self.artifact_to_sign_uri_config_key)
        # download the artifact that needs to be signed and place it at work_dir;
        # the path to the file is returned as a string
        path_to_file = download_source_to_destination(artifact_to_sign_uri,
                                                      work_dir)

        # get the pgp private key to sign the artifact with
        signer_pgp_private_key = self.get_value('signer-pgp-private-key')

        # import the PGP key and get the fingerprint
        signer_pgp_private_key_fingerprint = import_pgp_key(
            pgp_private_key=signer_pgp_private_key)

        signer_pgp_public_key = export_pgp_public_key(
            signer_pgp_private_key_fingerprint)

        rekor_entry = self._create_rekor_entry(
            signer_pgp_public_key, signer_pgp_private_key_fingerprint,
            path_to_file, artifact_to_sign_uri)

        rekor_uuid = self._upload_to_rekor(rekor_server, rekor_entry)
        step_result.add_artifact(name='rekor-entry', value=rekor_entry)
        step_result.add_artifact(name='rekor-uuid', value=rekor_uuid)
        rekor_uri = rekor_server + '/api/v1/log/entries/' + rekor_uuid
        step_result.add_artifact(name='rekor-entry-uri', value=rekor_uri)
        return step_result
Example #18
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """

        step_result = StepResult.from_step_implementer(self)
        cz_json_path = self.get_value('cz-json')

        repo_root = self.get_value('repo-root')
        repo = Repo(repo_root)
        os.chdir(repo_root)

        with open(cz_json_path, 'rb+') as cz_json:
            cz_json_contents = json.loads(cz_json.read())
            cz_json_contents['commitizen']['version'] = self._get_version_tag(
                repo.tags)
            cz_json.seek(0)
            cz_json.truncate(0)
            cz_json.write(json.dumps(cz_json_contents).encode())

        out = io.StringIO()
        sh.cz.bump(  # pylint: disable=no-member
            '--dry-run',
            '--yes',
            _out=out,
            _err=sys.stderr,
            _tee='err')
        bump_regex = r'tag to create: (\d+\.\d+\.\d+)'
        version = re.findall(bump_regex, out.getvalue())[0]
        step_result.add_artifact(name='app-version', value=version)

        return step_result
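
The regular expression above pulls the next version out of the `cz bump --dry-run` output. A quick standalone illustration against a made-up output line:

    import re

    sample_output = 'tag to create: 1.4.0\nincrement detected: MINOR\n'  # illustrative only
    version = re.findall(r'tag to create: (\d+\.\d+\.\d+)', sample_output)[0]
    assert version == '1.4.0'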
Example #19
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)
        rekor_server = self.get_value('rekor-server-url')
        extra_data_file = os.path.join(self.work_dir_path,
                                       self.step_name + '.json')
        self.workflow_result.write_results_to_json_file(extra_data_file)
        rekor_entry, rekor_uuid = self.upload_to_rekor(
            rekor_server=rekor_server,
            extra_data_file=extra_data_file,
            signer_pgp_public_key_path=self.get_value(
                'signer-pgp-public-key-path'),
            signer_pgp_private_key_user=self.get_value(
                'signer-pgp-private-key-user'))
        step_result.add_artifact(name='rekor-entry', value=rekor_entry)
        step_result.add_artifact(name='rekor-uuid', value=rekor_uuid)
        return step_result
Example #20
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        argocd_result_set = self.get_value('argocd-deployed-manifest')

        if not os.path.exists(argocd_result_set):
            step_result.success = False
            step_result.message = 'File specified in ' \
                                  f'argocd-deployed-manifest {argocd_result_set} not found'
            return step_result

        step_result.add_artifact(
            name='configlint-yml-path',
            value=argocd_result_set
        )
        return step_result
Example #21
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        context = self.get_value('context')
        image_spec_file = self.get_value('imagespecfile')
        image_spec_file_location = context + '/' + image_spec_file
        application_name = self.get_value('application-name')
        service_name = self.get_value('service-name')
        tls_verify = self.get_value('tls-verify')

        if not os.path.exists(image_spec_file_location):
            step_result.success = False
            step_result.message = 'Image specification file does not exist in location: ' \
                f'{image_spec_file_location}'
            return step_result

        image_tag_version = self.get_value('container-image-version')
        if image_tag_version is None:
            image_tag_version = 'latest'
            print('No image tag version found in metadata. Using latest')

        destination = "localhost/{application_name}/{service_name}".format(
            application_name=application_name, service_name=service_name)
        tag = "{destination}:{version}".format(destination=destination,
                                               version=image_tag_version)

        try:
            # login to any provider container registries
            # NOTE: important to specify the auth file because, depending on the context this is
            #       being run in, the python process may not have permissions for the default location
            containers_config_auth_file = self.get_value(
                'containers-config-auth-file')
            container_registries_login(
                registries=self.get_value('container-registries'),
                containers_config_auth_file=containers_config_auth_file,
                containers_config_tls_verify=tls_verify)

            # perform build
            #
            # NOTE: using --storage-driver=vfs so that container does not need escalated privileges
            #       vfs is less efficient than fuse (which would require host mounts),
            #       but such is the price we pay for security.
            sh.buildah.bud(  # pylint: disable=no-member
                '--storage-driver=vfs',
                '--format=' + self.get_value('format'),
                '--tls-verify=' + str(tls_verify).lower(),
                '--layers',
                '-f',
                image_spec_file,
                '-t',
                tag,
                '--authfile',
                containers_config_auth_file,
                context,
                _out=sys.stdout,
                _err=sys.stderr,
                _tee='err')

            step_result.add_artifact(name='container-image-version', value=tag)
        except sh.ErrorReturnCode as error:  # pylint: disable=undefined-variable
            step_result.success = False
            step_result.message = 'Issue invoking buildah bud with given image ' \
                f'specification file ({image_spec_file}): {error}'
            return step_result

        image_tar_file = f'image-{application_name}-{service_name}-{image_tag_version}.tar'
        image_tar_path = os.path.join(self.work_dir_path, image_tar_file)
        try:
            # Check to see if the tar docker-archive file already exists
            #   this needs to be run as buildah does not support overwriting
            #   existing files.
            #
            # NOTE: using --storage-driver=vfs so that container does not need escalated privileges
            #       vfs is less efficient than fuse (which would require host mounts),
            #       but such is the price we pay for security.
            if os.path.exists(image_tar_path):
                os.remove(image_tar_path)
            sh.buildah.push(  # pylint: disable=no-member
                '--storage-driver=vfs',
                tag,
                "docker-archive:" + image_tar_path,
                _out=sys.stdout,
                _err=sys.stderr,
                _tee='err')

            step_result.add_artifact(name='image-tar-file',
                                     value=image_tar_path)
        except sh.ErrorReturnCode as error:  # pylint: disable=undefined-variable
            step_result.success = False
            step_result.message = f'Issue invoking buildah push to tar file ' \
                f'({image_tar_path}): {error}'
            return step_result

        return step_result
Example #22
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # get the pgp private key to sign the image with
        signer_pgp_private_key = self.get_value(
            ['signer-pgp-private-key', 'container-image-signer-pgp-private-key']
        )

        # get the uri to the image to sign
        container_image_tag = self.get_value(['container-image-push-tag', 'container-image-tag'])

        image_signatures_directory = self.create_working_dir_sub_dir(
            sub_dir_relative_path='image-signature'
        )
        try:
            # import the PGP key and get the fingerprint
            signer_pgp_private_key_fingerprint = import_pgp_key(
                pgp_private_key=signer_pgp_private_key
            )
            step_result.add_artifact(
                name='container-image-signature-signer-private-key-fingerprint',
                value=signer_pgp_private_key_fingerprint
            )

            # login to any provider container registries
            # NOTE 1: can not use auth file, even though we want to, because podman image sign
            #         does not accept authfile.
            #         https://github.com/containers/podman/issues/10866
            # NOTE 2: have to force login to use podman because even though logging in with
            #         any of the tools should work, in testing the podman sign only worked
            #         from within the python virtual environment if the login happened with podman.
            container_registries_login(
                registries=self.get_value('container-registries'),
                containers_config_tls_verify=util.strtobool(self.get_value('src-tls-verify')),
                container_command_short_name='podman'
            )

            # sign the image
            signature_file_path = PodmanSign.__sign_image(
                pgp_private_key_fingerprint=signer_pgp_private_key_fingerprint,
                image_signatures_directory=image_signatures_directory,
                container_image_tag=container_image_tag
            )
            step_result.add_artifact(
                name='container-image-signed-tag',
                value=container_image_tag,
            )
            step_result.add_artifact(
                name='container-image-signature-file-path',
                value=signature_file_path,
            )
            signature_name = os.path.relpath(signature_file_path, image_signatures_directory)
            step_result.add_artifact(
                name='container-image-signature-name',
                value=signature_name
            )

            # upload the image signature
            container_image_signature_destination_url = self.get_value(
                'container-image-signature-destination-url'
            )
            if container_image_signature_destination_url:
                container_image_signature_destination_uri = \
                    f"{container_image_signature_destination_url}/{signature_name}"
                step_result.add_artifact(
                    name='container-image-signature-uri',
                    description='URI of the uploaded container image signature',
                    value=container_image_signature_destination_uri
                )

                upload_result = upload_file(
                    file_path=signature_file_path,
                    destination_uri=container_image_signature_destination_uri,
                    username=self.get_value('container-image-signature-destination-username'),
                    password=self.get_value('container-image-signature-destination-password')
                )
                step_result.add_artifact(
                    name='container-image-signature-upload-results',
                    description='Results of uploading the container image signature' \
                                ' to the given destination.',
                    value=upload_result
                )
        except (RuntimeError, StepRunnerException) as error:
            step_result.success = False
            step_result.message = str(error)

        return step_result
Example #23
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        pom_file = self.get_value('pom-file')
        artifact_extensions = self.get_value('artifact-extensions')
        artifact_parent_dir = self.get_value('artifact-parent-dir')

        if not os.path.exists(pom_file):
            step_result.success = False
            step_result.message = f'Given pom file does not exist: {pom_file}'
            return step_result

        settings_file = self._generate_maven_settings()
        mvn_output_file_path = self.write_working_file('mvn_test_output.txt')
        try:
            with open(mvn_output_file_path, 'w') as mvn_output_file:
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stdout, mvn_output_file])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stderr, mvn_output_file])

                sh.mvn(  # pylint: disable=no-member
                    'clean',
                    'install',
                    '-f',
                    pom_file,
                    '-s',
                    settings_file,
                    _out=out_callback,
                    _err=err_callback)
        except sh.ErrorReturnCode as error:
            step_result.success = False
            step_result.message = "Package failures. See 'maven-output' report artifacts " \
                f"for details: {error}"
            return step_result
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from 'mvn install'.",
                name='maven-output',
                value=mvn_output_file_path)

        # find the artifacts
        artifact_file_names = []
        files_in_artifact_parent_dir = \
            os.listdir(os.path.join(
                os.path.dirname(os.path.abspath(pom_file)),
                artifact_parent_dir))
        for filename in files_in_artifact_parent_dir:
            if any(filename.endswith(ext) for ext in artifact_extensions):
                artifact_file_names.append(filename)

        # error if we find more than one artifact
        # see https://projects.engineering.redhat.com/browse/NAPSSPO-546
        if len(artifact_file_names) > 1:
            step_result.success = False
            step_result.message = 'pom resulted in multiple artifacts with expected artifact ' \
                                  f'extensions ({artifact_extensions}), this is unsupported'
            return step_result

        if len(artifact_file_names) < 1:
            step_result.success = False
            step_result.message = 'pom resulted in 0 artifacts with expected artifact extensions ' \
                                  f'({artifact_extensions}), this is unsupported'
            return step_result

        artifact_id = get_xml_element(pom_file, 'artifactId').text
        group_id = get_xml_element(pom_file, 'groupId').text
        try:
            package_type = get_xml_element(pom_file, 'packaging').text
        except ValueError:
            package_type = 'jar'

        package_artifacts = {
            'path': os.path.join(
                os.path.dirname(os.path.abspath(pom_file)),
                artifact_parent_dir,
                artifact_file_names[0]),
            'artifact-id': artifact_id,
            'group-id': group_id,
            'package-type': package_type,
            'pom-path': pom_file
        }

        # Currently, package returns ONE artifact, e.g., one war file.
        # However, in the future, an ARRAY could be returned, e.g., several jar files.
        step_result.add_artifact(name='package-artifacts',
                                 value=[package_artifacts])

        return step_result
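
The extension filter used when locating the built artifact is a plain suffix match over the directory listing. A small standalone illustration (file names and extensions are hypothetical):

    # Hypothetical directory contents and extensions, for illustration only.
    artifact_extensions = ['jar', 'war', 'ear']
    files_in_artifact_parent_dir = [
        'app-1.0.0.war', 'app-1.0.0.war.original', 'classes', 'maven-status']
    artifact_file_names = [
        filename for filename in files_in_artifact_parent_dir
        if any(filename.endswith(ext) for ext in artifact_extensions)
    ]
    assert artifact_file_names == ['app-1.0.0.war']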
Example #24
    def _run_step(self):
        step_result = StepResult.from_step_implementer(self)
        return step_result
Example #25
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # configlint-yml-path is required
        configlint_yml_path = self.get_value('configlint-yml-path')

        if not os.path.exists(configlint_yml_path):
            step_result.success = False
            step_result.message = 'File specified in ' \
                                  f'configlint-yml-path not found: {configlint_yml_path}'
            return step_result

        # Required: rules and exists
        rules_file = self.get_value('rules')
        if not os.path.exists(rules_file):
            step_result.success = False
            step_result.message = f'File specified in rules not found: {rules_file}'
            return step_result

        configlint_results_file_path = self.write_working_file(
            'configlint_results_file.txt')
        try:
            # run config-lint writing stdout and stderr to the standard streams
            # as well as to a results file.
            with open(configlint_results_file_path, 'w', encoding='utf-8') \
                    as configlint_results_file:
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stdout, configlint_results_file])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stderr, configlint_results_file])

                sh.config_lint(  # pylint: disable=no-member
                    "-verbose",
                    "-debug",
                    "-rules",
                    rules_file,
                    configlint_yml_path,
                    _encoding='UTF-8',
                    _out=out_callback,
                    _err=err_callback,
                    _tee='err')
        except sh.ErrorReturnCode_255:  # pylint: disable=no-member
            # NOTE: expected failure condition,
            #       aka, the config lint run, but found an issue
            #       stderr/stdout is captured in configlint_results_file_path
            step_result.success = False
            step_result.message = 'Failed config-lint scan.'
        except sh.ErrorReturnCode:
            # NOTE: un-expected failure condition
            #       aka, the config lint failed to run for some reason
            #       stderr/stdout is captured in configlint_results_file_path
            step_result.success = False
            step_result.message = 'Unexpected Error invoking config-lint.'

        step_result.add_artifact(name='configlint-result-set',
                                 value=configlint_results_file_path)
        step_result.add_artifact(name='configlint-yml-path',
                                 value=configlint_yml_path)
        return step_result
Example #26
    def _run_step(self):
        step_result = StepResult.from_step_implementer(self)
        step_result.success = False
        return step_result
Example #27
    def run_step(self):
        """Wrapper for running the implemented step.

        Returns
        -------
        StepResult
            Results of running this step.
        """

        StepImplementer.__print_section_title(f"Step Start - {self.step_name}")

        # print information about the configuration
        StepImplementer.__print_section_title(
            f"Configuration - {self.step_name} - {self.sub_step_name}",
            div_char="-",
            indent=1)
        StepImplementer.__print_data(
            "Step Implementer Configuration Defaults",
            ConfigValue.convert_leaves_to_values(
                self.step_implementer_config_defaults()))
        StepImplementer.__print_data(
            "Global Configuration Defaults",
            ConfigValue.convert_leaves_to_values(self.global_config_defaults))
        StepImplementer.__print_data(
            "Global Environment Configuration Defaults",
            ConfigValue.convert_leaves_to_values(
                self.global_environment_config_defaults))
        StepImplementer.__print_data(
            "Step Configuration",
            ConfigValue.convert_leaves_to_values(self.step_config))
        StepImplementer.__print_data(
            "Step Environment Configuration",
            ConfigValue.convert_leaves_to_values(self.step_environment_config))
        StepImplementer.__print_data(
            "Step Configuration Runtime Overrides",
            ConfigValue.convert_leaves_to_values(self.step_config_overrides))

        # create the munged runtime step configuration and print
        copy_of_runtime_step_config = self.get_copy_of_runtime_step_config()
        StepImplementer.__print_data(
            "Runtime Step Configuration",
            ConfigValue.convert_leaves_to_values(copy_of_runtime_step_config))

        step_result = None
        try:
            # validate the runtime step configuration
            self._validate_required_config_or_previous_step_result_artifact_keys()

            # run the step
            StepImplementer.__print_section_title(
                f"Standard Out - {self.step_name}", div_char="-", indent=1)

            indented_stdout = TextIOIndenter(parent_stream=sys.stdout,
                                             indent_level=2)
            indented_stderr = TextIOIndenter(parent_stream=sys.stderr,
                                             indent_level=2)

            with redirect_stdout(indented_stdout), redirect_stderr(
                    indented_stderr):
                step_result = self._run_step()
        except AssertionError as invalid_error:
            step_result = StepResult.from_step_implementer(self)
            step_result.success = False
            step_result.message = str(invalid_error)

        # print the step run results
        StepImplementer.__print_section_title(f"Results - {self.step_name}",
                                              div_char="-",
                                              indent=1)

        StepImplementer.__print_data('Environment', step_result.environment)
        StepImplementer.__print_data('Step', step_result.step_name)
        StepImplementer.__print_data('Sub Step', step_result.sub_step_name)
        StepImplementer.__print_data('Sub Step Implementer',
                                     step_result.sub_step_implementer_name)
        StepImplementer.__print_data('Success', step_result.success)
        StepImplementer.__print_data('Message', step_result.message)
        StepImplementer.__print_data('Artifacts', step_result.artifacts_dicts)

        StepImplementer.__print_section_title(f'Step End - {self.step_name}')

        return step_result
Example #28
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # Get config items
        maven_push_artifact_repo_id = self.get_value(
            'maven-push-artifact-repo-id')
        maven_push_artifact_repo_url = self.get_value(
            'maven-push-artifact-repo-url')
        version = self.get_value('version')

        # push the artifacts
        mvn_update_version_output_file_path = self.write_working_file(
            'mvn_versions_set_output.txt')
        mvn_push_artifacts_output_file_path = self.write_working_file(
            'mvn_deploy_output.txt')
        try:
            # update the version before pushing
            # NOTE 1: we know this is weird. But the version in the pom isn't necessarily
            #         the version that was calculated as part of the release and so we need
            #         to update that before doing the maven deploy so the maven deploy will
            #         use the new version.
            #
            # NOTE 2: we tried doing this in the same command as the deploy,
            #         but the pom was already loaded so even though the xml was updated
            #         the deploy still used the old version, hence having to run this
            #         first and independently.
            print("Update maven package version")
            run_maven(mvn_output_file_path=mvn_update_version_output_file_path,
                      settings_file=self.maven_settings_file,
                      pom_file=self.get_value('pom-file'),
                      phases_and_goals=['versions:set'],
                      additional_arguments=[f'-DnewVersion={version}'])

            # execute maven step (params come from config)
            print("Push packaged maven artifacts")
            self._run_maven_step(
                mvn_output_file_path=mvn_push_artifacts_output_file_path,
                step_implementer_additional_arguments=[
                    '-DaltDeploymentRepository=' \
                    f'{maven_push_artifact_repo_id}::default::{maven_push_artifact_repo_url}'
                ]
            )
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = "Error running 'maven deploy' to push artifacts. " \
                f"More details maybe found in 'maven-output' report artifact: {error}"
        finally:
            step_result.add_artifact(
                description='Standard out and standard error from running maven to '
                            'update version.',
                name='maven-update-version-output',
                value=mvn_update_version_output_file_path)
            step_result.add_artifact(
                description="Standard out and standard error from running maven to " \
                    "push artifacts to repository.",
                name='maven-push-artifacts-output',
                value=mvn_push_artifacts_output_file_path
            )

        return step_result
Example #29
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # Get config items
        maven_push_artifact_repo_id = self.get_value(
            'maven-push-artifact-repo-id')
        maven_push_artifact_repo_url = self.get_value(
            'maven-push-artifact-repo-url')
        version = self.get_value('version')
        package_artifacts = self.get_value('package-artifacts')
        tls_verify = self.get_value('tls-verify')

        # disable tls verification
        mvn_additional_options = []
        if not tls_verify:
            mvn_additional_options += [
                '-Dmaven.wagon.http.ssl.insecure=true',
                '-Dmaven.wagon.http.ssl.allowall=true',
                '-Dmaven.wagon.http.ssl.ignore.validity.dates=true',
            ]

        # Create settings.xml
        settings_file = self._generate_maven_settings()

        # push the artifacts
        push_artifacts = []
        mvn_output_file_path = self.write_working_file('mvn_test_output.txt')
        try:
            for package in package_artifacts:
                artifact_path = package['path']
                group_id = package['group-id']
                artifact_id = package['artifact-id']
                package_type = package['package-type']

                # push the artifact
                with open(mvn_output_file_path, 'a') as mvn_output_file:
                    out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                        [sys.stdout, mvn_output_file])
                    err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                        [sys.stderr, mvn_output_file])
                    sh.mvn(  # pylint: disable=no-member
                        'deploy:deploy-file',
                        '-Dversion=' + version,
                        '-Dfile=' + artifact_path,
                        '-DgroupId=' + group_id,
                        '-DartifactId=' + artifact_id,
                        '-Dpackaging=' + package_type,
                        '-Durl=' + maven_push_artifact_repo_url,
                        '-DrepositoryId=' + maven_push_artifact_repo_id,
                        '-s' + settings_file,
                        *mvn_additional_options,
                        _out=out_callback,
                        _err=err_callback)

                # record the pushed artifact
                push_artifacts.append({
                    'artifact-id': artifact_id,
                    'group-id': group_id,
                    'version': version,
                    'path': artifact_path,
                    'packaging': package_type,
                })
        except sh.ErrorReturnCode as error:
            step_result.success = False
            step_result.message = "Push artifacts failures. See 'maven-output' report artifacts " \
                f"for details: {error}"

        step_result.add_artifact(
            description="Standard out and standard error from 'mvn deploy:deploy-file'.",
            name='maven-output',
            value=mvn_output_file_path)
        step_result.add_artifact(name='push-artifacts', value=push_artifacts)
        return step_result
Example #30
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # get input
        deployment_config_repo = self.get_value('deployment-config-repo')
        deployment_config_repo_branch = ArgoCD.__get_repo_branch()
        deployment_config_helm_chart_path = self.get_value('deployment-config-helm-chart-path')
        deployment_config_destination_cluster_uri = self.get_value('kube-api-uri')
        deployment_config_destination_cluster_token = self.get_value('kube-api-token')
        deployment_config_helm_chart_environment_values_file = \
            self.__get_deployment_config_helm_chart_environment_values_file()
        deployment_config_helm_chart_values_file_image_tag_yq_path = \
            self.get_value('deployment-config-helm-chart-values-file-image-tag-yq-path')
        deployment_config_helm_chart_additional_value_files = \
            self.get_value('deployment-config-helm-chart-additional-values-files')
        container_image_tag = self.get_value('container-image-tag')
        force_push_tags = self.get_value('force-push-tags')

        try:
            argocd_app_name = self.__get_app_name()
            step_result.add_artifact(
                name='argocd-app-name',
                value=argocd_app_name
            )

            # clone the configuration repository
            print("Clone the configuration repository")
            clone_repo_dir_name = 'deployment-config-repo'
            deployment_config_repo_dir = ArgoCD.__clone_repo(
                repo_dir=self.create_working_dir_sub_dir(clone_repo_dir_name),
                repo_url=deployment_config_repo,
                repo_branch=deployment_config_repo_branch,
                git_email=self.get_value('git-email'),
                git_name=self.get_value('git-name'),
                username=self.get_value('git-username'),
                password=self.get_value('git-password')
            )

            # update values file, commit it, push it, and tag it
            print("Update the environment values file")
            deployment_config_helm_chart_environment_values_file_path = os.path.join(
                deployment_config_repo_dir,
                deployment_config_helm_chart_path,
                deployment_config_helm_chart_environment_values_file
            )
            self.__update_yaml_file_value(
                file=deployment_config_helm_chart_environment_values_file_path,
                yq_path=deployment_config_helm_chart_values_file_image_tag_yq_path,
                value=container_image_tag
            )
            print("Commit the updated environment values file")
            ArgoCD.__git_commit_file(
                git_commit_message=f'Updating values for deployment to {self.environment}',
                file_path=os.path.join(
                    deployment_config_helm_chart_path,
                    deployment_config_helm_chart_environment_values_file
                ),
                repo_dir=deployment_config_repo_dir
            )
            print("Tag and push the updated environment values file")
            deployment_config_repo_tag = self.__get_deployment_config_repo_tag()
            self.__git_tag_and_push_deployment_config_repo(
                deployment_config_repo=deployment_config_repo,
                deployment_config_repo_dir=deployment_config_repo_dir,
                deployment_config_repo_tag=deployment_config_repo_tag,
                force_push_tags=force_push_tags
            )
            step_result.add_artifact(
                name='config-repo-git-tag',
                value=deployment_config_repo_tag
            )

            # create/update argocd app and sync it
            print("Sign into ArgoCD")
            ArgoCD.__argocd_sign_in(
                argocd_api=self.get_value('argocd-api'),
                username=self.get_value('argocd-username'),
                password=self.get_value('argocd-password'),
                insecure=self.get_value('argocd-skip-tls')
            )
            print("Add target cluster to ArgoCD")
            self.__argocd_add_target_cluster(
                kube_api=deployment_config_destination_cluster_uri,
                kube_api_token=deployment_config_destination_cluster_token,
                kube_api_skip_tls=self.get_value('kube-api-skip-tls')
            )
            print(f"Create or update ArgoCD Application ({argocd_app_name})")
            argocd_values_files = []
            argocd_values_files += deployment_config_helm_chart_additional_value_files
            argocd_values_files += [deployment_config_helm_chart_environment_values_file]
            ArgoCD.__argocd_app_create_or_update(
                argocd_app_name=argocd_app_name,
                repo=deployment_config_repo,
                revision=deployment_config_repo_branch,
                path=deployment_config_helm_chart_path,
                dest_server=deployment_config_destination_cluster_uri,
                auto_sync=self.get_value('argocd-auto-sync'),
                values_files=argocd_values_files
            )

            # sync and wait for the sync of the ArgoCD app
            print(f"Sync (and wait for) ArgoCD Application ({argocd_app_name})")
            ArgoCD.__argocd_app_sync(
                argocd_app_name=argocd_app_name,
                argocd_sync_timeout_seconds=self.get_value('argocd-sync-timeout-seconds')
            )

            # get the ArgoCD app manifest that was synced
            print(f"Get ArgoCD Application ({argocd_app_name}) synced manifest")
            argocd_app_manifest_file = self.__argocd_get_app_manifest(
                argocd_app_name=argocd_app_name
            )
            step_result.add_artifact(
                name='argocd-deployed-manifest',
                value=argocd_app_manifest_file
            )

            # determine the deployed host URLs
            print(
                "Determine the deployed host URLs for the synced"
                f" ArgoCD Application ({argocd_app_name})"
            )
            deployed_host_urls = ArgoCD.__get_deployed_host_urls(
                manifest_path=argocd_app_manifest_file
            )
            step_result.add_artifact(
                name='deployed-host-urls',
                value=deployed_host_urls
            )
        except StepRunnerException as error:
            step_result.success = False
            step_result.message = f"Error deploying to environment ({self.environment}):" \
                f" {str(error)}"

        return step_result