Example #1
def run_npm(npm_output_file_path, npm_args):
    """
    Run an npm command

    Parameters
    ----------
    npm_output_file_path : str
        Path to the file to write the npm command output (stdout and stderr) to.
    npm_args
        Command line arguments to pass to npm.
    """
    try:
        with open(npm_output_file_path, 'w',
                  encoding='utf-8') as npm_output_file:
            out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                [sys.stdout, npm_output_file])
            err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                [sys.stderr, npm_output_file])

            sh.npm(  # pylint: disable=no-member
                npm_args,
                _out=out_callback,
                _err=err_callback)
    except sh.ErrorReturnCode as error:
        raise StepRunnerException(f"Error running npm. {error}") from error
Example #2
def run_tox(tox_output_file_path, tox_args):
    """
    Run a tox command

    Parameters
    ----------
    tox_output_file_path : str
        Path to the file to write the tox command output (stdout and stderr) to.
    tox_args
        Command line arguments to pass to tox.
    """

    try:
        with open(tox_output_file_path, 'w', encoding='utf-8') as tox_output_file:
            out_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                sys.stdout,
                tox_output_file
            ])
            err_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                sys.stderr,
                tox_output_file
            ])

            sh.tox( # pylint: disable=no-member
                tox_args,
                _out=out_callback,
                _err=err_callback
            )
    except sh.ErrorReturnCode as error:
        raise StepRunnerException(
            f"Error running tox. {error}"
        ) from error
Example #3
    def _argocd_app_wait_for_health(argocd_app_name, argocd_timeout_seconds):
        """Waits for ArgoCD Application to reach Healthy state.

        Parameters
        ----------
        argocd_app_name : str
            Name of ArgoCD Application to wait for Healthy state of.
        argocd_timeout_seconds : int
            Number of seconds to wait before timing out waiting for Healthy state.

        Raises
        ------
        StepRunnerException
            If error (including timeout) waiting for existing ArgoCD Application Healthy state.
            If ArgoCD Application transitions from Healthy to Degraded while waiting for Healthy
            state.
        """
        for wait_for_health_retry in range(
                ArgoCDGeneric.MAX_ATTEMPT_TO_WAIT_FOR_ARGOCD_OP_RETRIES):
            argocd_output_buff = StringIO()
            try:
                print(
                    f"Wait for Healthy ArgoCD Application ({argocd_app_name})")
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stdout, argocd_output_buff])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stderr, argocd_output_buff])
                sh.argocd.app.wait(  # pylint: disable=no-member
                    argocd_app_name,
                    '--health',
                    '--timeout',
                    argocd_timeout_seconds,
                    _out=out_callback,
                    _err=err_callback)
                break
            except sh.ErrorReturnCode as error:
                # if error waiting for Healthy state because entered Degraded state
                # while waiting for Healthy state
                # try again to wait for Healthy state assuming that on next attempt the
                # new degradation of Health will resolve itself.
                #
                # NOTE: this can happen based on bad timing if for instance an
                #       HorizontalPodAutoscaller doesn't enter Degraded state until after we are
                #       already waiting for the ArgoCD Application to enter Healthy state,
                #       but then the HorizontalPodAutoscaller will, after a time, become Healthy.
                if re.match(
                        ArgoCDGeneric.
                        ARGOCD_HEALTH_STATE_TRANSITIONED_FROM_HEALTHY_TO_DEGRADED,
                        argocd_output_buff.getvalue()):
                    print(
                        f"ArgoCD Application ({argocd_app_name}) entered Degraded state"
                        " while waiting for it to enter Healthy state."
                        f" Try ({wait_for_health_retry} out of"
                        f" {ArgoCDGeneric.MAX_ATTEMPT_TO_WAIT_FOR_ARGOCD_OP_RETRIES}) again to"
                        " wait for Healthy state.")
                else:
                    raise StepRunnerException(
                        f"Error waiting for Healthy ArgoCD Application ({argocd_app_name}): {error}"
                    ) from error
Example #4
    def upload_to_rekor(self, rekor_server, extra_data_file,
                        signer_pgp_public_key_path,
                        signer_pgp_private_key_user):
        """TODO: function will be refactored in next release. will doc then.
        """
        rekor_entry = Rekor.create_rekor_entry(
            signer_pgp_public_key_path=signer_pgp_public_key_path,
            signer_pgp_private_key_user=signer_pgp_private_key_user,
            extra_data_file=extra_data_file)

        rekor_entry_path = self.write_working_file(filename='entry.json',
                                                   contents=bytes(
                                                       json.dumps(rekor_entry),
                                                       'utf-8'))

        rekor_upload_stdout_result = StringIO()
        rekor_upload_stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback(
            [sys.stdout, rekor_upload_stdout_result])
        rekor = sh.rekor(  # pylint: disable=no-member
            'upload',
            '--rekor_server',
            rekor_server,
            '--entry',
            rekor_entry_path,
            _out=rekor_upload_stdout_callback,
            _err_to_out=True,
            _tee='out')
        rekor_uuid = str(rekor).split('/')[-1].strip(' \n')
        return rekor_entry, rekor_uuid
Example #5
    def test_one_stream(self):
        stream_one = StringIO()
        sh_redirect_to_multiple_streams_fn_callback = \
            create_sh_redirect_to_multiple_streams_fn_callback([
                stream_one
            ])

        sh_redirect_to_multiple_streams_fn_callback('data1')

        self.assertEqual('data1', stream_one.getvalue())
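
The test above pins down the behavior of create_sh_redirect_to_multiple_streams_fn_callback: the callback it returns writes whatever sh hands it to every stream it was created with. A minimal sketch of such a factory, consistent with that test but not necessarily the project's actual implementation, could look like this:

def create_sh_redirect_to_multiple_streams_fn_callback(streams):
    """Create an sh _out/_err callback that fans output out to multiple streams.

    Sketch only: assumes each stream supports write() and flush().
    """
    def sh_redirect_to_multiple_streams_fn_callback(data):
        for stream in streams:
            stream.write(data)
            stream.flush()

    return sh_redirect_to_multiple_streams_fn_callback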
Example #6
    def __curl_file(
            container_image_signature_file_path,
            container_image_signature_name,
            signature_server_url,
            signature_server_username,
            signature_server_password
    ):
        """Sends the signature file

        Raises
        ------
        StepRunnerException
            If error pushing image signature.
        """
        # remove any trailing / from url
        signature_server_url = re.sub(r'/$', '', signature_server_url)
        container_image_signature_url = f"{signature_server_url}/{container_image_signature_name}"

        # calculate hashes
        with open(container_image_signature_file_path, 'rb') as container_image_signature_file:
            container_image_signature_file_contents = container_image_signature_file.read()
            signature_file_md5 = hashlib.md5(container_image_signature_file_contents).hexdigest()
            signature_file_sha1 = hashlib.sha1(container_image_signature_file_contents).hexdigest()

        try:
            stdout_result = StringIO()
            stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                sys.stdout,
                stdout_result
            ])

            # -s: Silent
            # -S: Show error
            # -f: Don't print out failure document
            # -v: Verbose
            sh.curl(  # pylint: disable=no-member
                '-sSfv',
                '-X', 'PUT',
                '--header', f'X-Checksum-Sha1:{signature_file_sha1}',
                '--header', f'X-Checksum-MD5:{signature_file_md5}',
                '--user', f"{signature_server_username}:{signature_server_password}",
                '--upload-file', container_image_signature_file_path,
                container_image_signature_url,
                _out=stdout_callback,
                _err_to_out=True,
                _tee='out'
            )
        except sh.ErrorReturnCode as error:
            raise StepRunnerException(
                f"Error pushing signature file to signature server using curl: {error}"
            ) from error

        return container_image_signature_url, signature_file_md5, signature_file_sha1
Example #7
def create_container_from_image(
    image_address,
    repository_type='container-storage:'
):
    """Import a container image using buildah form a TAR file.

    Parameters
    ----------
    image_address : str
        Image tag to create a container from.
        ex:
        * localhost/my-app:latest
        * quay.io/my-org/my-app:latest
        * docker-archive:/local/path/to/my-app-container-image.tar
    repository_type : str
        The type of repository to mount the given image tag from.
        See https://github.com/containers/skopeo for details on different repository types.

    Returns
    -------
    str
        Name of the imported container.

    Raises
    ------
    RuntimeError
        If error importing image.
    """
    container_name = None
    try:
        buildah_from_out_buff = StringIO()
        buildah_from_out_callback = create_sh_redirect_to_multiple_streams_fn_callback([
            sys.stdout,
            buildah_from_out_buff
        ])
        sh.buildah(  # pylint: disable=no-member
            'from',
            f"{repository_type}{image_address}",
            _out=buildah_from_out_callback,
            _err=sys.stderr,
            _tee='err'
        )
        container_name = buildah_from_out_buff.getvalue().rstrip()
    except sh.ErrorReturnCode as error:
        raise RuntimeError(
            f'Error creating container from image ({image_address}): {error}'
        ) from error

    return container_name
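
A hypothetical use of create_container_from_image with the docker-archive repository type; the tar path below is illustrative only:

# illustrative only: create a working container from a local image tarball
container = create_container_from_image(
    '/local/path/to/my-app-container-image.tar',  # assumed path
    repository_type='docker-archive:'
)
print(f"working container: {container}")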
Example #8
def run_npm(npm_output_file_path, npm_args, npm_envs=None):
    """
    Run an npm command

    Parameters
    ----------
    npm_output_file_path : str
        Path to the file to write the npm command output (stdout and stderr) to.
    npm_args
        Command line arguments to pass to npm.
    npm_envs : dict
        Dictionary of additional environment variables to set when running npm.
    """
    try:
        with open(npm_output_file_path, 'w',
                  encoding='utf-8') as npm_output_file:
            out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                [sys.stdout, npm_output_file])
            err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                [sys.stderr, npm_output_file])

            if npm_envs:
                new_env = os.environ.copy()
                new_env.update(npm_envs)

                sh.npm(  # pylint: disable=no-member
                    npm_args,
                    _env=new_env,
                    _out=out_callback,
                    _err=err_callback)
            else:
                sh.npm(  # pylint: disable=no-member
                    npm_args,
                    _out=out_callback,
                    _err=err_callback)
    except sh.ErrorReturnCode as error:
        raise StepRunnerException(f"Error running npm. {error}") from error
Example #9
    def __audit_attestation(
            self,  # pylint: disable=no-self-use
            workflow_attestation_file_path,
            workflow_policy_file_path,
            workflow_policy_query):
        """Method to run the opa shell command to evaluate a data file with a provided
           query file and given query.

        Parameters
        ----------
        workflow_attestation_file_path
            File path to the workflow attestation file
        workflow_policy_file_path
            File path to the workflow policy file
        workflow_policy_query
            Query that is run against the policy file

        Returns
        -------
        str
            Output of the opa shell command. This is especially useful
            when the query fails.
        int
            Return code of the opa shell command. A value of 0
            means the query succeeded whereas a value of
            1 means the query failed.
        """

        opa_attestation_stdout_result = StringIO()
        opa_attestation_stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback(
            [sys.stdout, opa_attestation_stdout_result])

        try:
            sh.opa(  # pylint: disable=no-member
                'eval',
                '--fail-defined',
                '-d',
                workflow_attestation_file_path,
                '-i',
                workflow_policy_file_path,
                workflow_policy_query,
                _out=opa_attestation_stdout_callback,
                _err_to_out=True,
                _tee='out')

        except sh.ErrorReturnCode as error:
            return f"Error evaluating query against data:  {error}", 1

        return 'Audit was successful', 0
Example #10
    def __verify_sig(
        self,
        signature_file_path,
        artifact_file_path,
        private_key_fingerprint,
    ):
        # GPG_OUTPUT_REGEX = re.compile(r"using RSA key ([A-Za-z0-9]+).*(Good signature)", re.DOTALL)
        GPG_OUTPUT_REGEX = re.compile(f"using RSA key {private_key_fingerprint}.*Good signature", re.DOTALL)

        with TempDirectory() as temp_dir:
            signature_file_path=Path(signature_file_path)

            try:
                stdout_result = StringIO()
                stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                    sys.stdout,
                    stdout_result
                ])

                sh.gpg(
                    '--verify', signature_file_path,
                    artifact_file_path,
                    _out=stdout_callback,
                    _err_to_out=True,
                    _tee='out'
                )

                verify_matches = re.findall(
                    GPG_OUTPUT_REGEX,
                    stdout_result.getvalue()
                )

                if len(verify_matches) < 1:
                    return False

            except sh.ErrorReturnCode as error:
                print(
                    f"Error verifying sig with gpg: {error}"
                )
                return False
            
            # if here, then verification successful
            return True
Example #11
def mount_container(buildah_unshare_command, container_id):
    """Use buildah to mount a container.

    Parameters
    ----------
    buildah_unshare_command : sh.buildah.unshare.bake()
        A baked sh.buildah.unshare command to run this command in the context of,
        so that this can be done "rootless".
    container_id : str
        ID of the container to mount.

    Returns
    -------
    str
        Absolute path to the mounted container.

    Raises
    ------
    RuntimeError
        If error mounting the container.
    """
    mount_path = None
    try:
        buildah_mount_out_buff = StringIO()
        buildah_mount_out_callback = create_sh_redirect_to_multiple_streams_fn_callback([
            sys.stdout,
            buildah_mount_out_buff
        ])
        buildah_mount_command = buildah_unshare_command.bake("buildah", "mount")
        buildah_mount_command(
            container_id,
            _out=buildah_mount_out_callback,
            _err=sys.stderr,
            _tee='err'
        )
        mount_path = buildah_mount_out_buff.getvalue().rstrip()
    except sh.ErrorReturnCode as error:
        raise RuntimeError(
            f'Error mounting container ({container_id}): {error}'
        ) from error

    return mount_path
Example #12
    def __import_pgp_key(pgp_private_key):
        print("Import PGP private key to sign container image(s) with")
        try:
            # import the key

            # NOTE: GPG is weird in that it sends "non-error" output to stderr even on success...
            #       so merge the stderr into stdout
            gpg_import_stdout_result = StringIO()
            gpg_import_stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                [sys.stdout, gpg_import_stdout_result])
            sh.gpg(  # pylint: disable=no-member
                '--import',
                '--fingerprint',
                '--with-colons',
                '--import-options=import-show',
                _in=pgp_private_key,
                _out=gpg_import_stdout_callback,
                _err_to_out=True,
                _tee='out')

            # get the fingerprint of the imported key
            #
            # NOTE: if more than one match, just use the first one...
            gpg_imported_pgp_private_key_fingerprints = re.findall(
                PodmanSign.GPG_IMPORT_FINGER_PRINT_REGEX,
                gpg_import_stdout_result.getvalue())
            if len(gpg_imported_pgp_private_key_fingerprints) < 1:
                raise StepRunnerException(
                    "Error getting PGP fingerprint for PGP key"
                    " to sign container image(s) with. See stdout and stderr for more info."
                )
            pgp_private_key_fingerprint = gpg_imported_pgp_private_key_fingerprints[
                0]

            print("Imported PGP private key to sign container image(s) with: "
                  f"fingerprint='{pgp_private_key_fingerprint}'")
        except sh.ErrorReturnCode as error:
            raise StepRunnerException(
                f"Error importing pgp private key: {error}") from error

        return pgp_private_key_fingerprint
Example #13
    def _upload_to_rekor(self, rekor_server, rekor_entry):
        """Method to upload a rekor entry to provided rekor server

        Parameters
        ----------
        rekor_server: str
            URL to rekor server
        signer_pgp_private_key_fingerprint: str
            PGP fingerprint obtained from importing the private key
        path_to_file: str
            Path to file to be signed
        artifact_to_sign_uri: str
            URI where artifact was pulled from

        Returns
        -------
        str:
            Returns rekor uuid returned from upload command

        """

        rekor_entry_path = self.write_working_file(filename='entry.json',
                                                   contents=bytes(
                                                       json.dumps(rekor_entry),
                                                       'utf-8'))
        rekor_upload_stdout_result = StringIO()
        rekor_upload_stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback(
            [sys.stdout, rekor_upload_stdout_result])
        rekor = sh.rekor(  # pylint: disable=no-member
            'upload',
            '--rekor_server',
            rekor_server,
            '--entry',
            rekor_entry_path,
            _out=rekor_upload_stdout_callback,
            _err_to_out=True,
            _tee='out')
        rekor_uuid = str(rekor).rsplit('/', maxsplit=1)[-1].strip(' \n')
        return rekor_uuid
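
The UUID parsing above assumes the entry UUID is the last path segment of the URL that rekor prints. A tiny illustration of that string handling, using a made-up output line (the real rekor upload output may differ):

# made-up example; the URL format is an assumption for illustration only
example_rekor_output = "Created entry at: https://rekor.example.org/api/v1/log/entries/abc123def456\n"
rekor_uuid = example_rekor_output.rsplit('/', maxsplit=1)[-1].strip(' \n')
print(rekor_uuid)  # -> abc123def456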
Example #14
def import_pgp_key(pgp_private_key):
    """Imports a PGP key.

    Parameters
    ----------
    pgp_private_key : str
        PGP key to import.

    Returns
    -------
    str
        Fingerprint of the imported PGP key.

    Raises
    ------
    RuntimeError
        If error getting PGP fingerprint for imported PGP key
        If error importing PGP key.
    """
    # Example input to match on:
    #   sec:-:3072:1:CF4AC14A3D109637:1601483310:1664555310::-:::scESC::::::23::0:
    #   fpr:::::::::DD7208BA0A6359F65B906B29CF4AC14A3D109637:
    #   grp:::::::::A483EE079EC1D58A954E3AAF3BCC61EDD7596BF0:
    gpg_regex = re.compile(r"^fpr:+([^:]+):$", re.MULTILINE)

    print("Import PGP private key to sign artifacts with")
    try:
        # import the key

        # NOTE: GPG is weird in that it sends "non-error" output to stderr even on success...
        #       so merge the stderr into stdout
        gpg_import_stdout_result = StringIO()
        gpg_import_stdout_callback = create_sh_redirect_to_multiple_streams_fn_callback([
            sys.stdout,
            gpg_import_stdout_result
        ])
        sh.gpg( # pylint: disable=no-member
            '--import',
            '--fingerprint',
            '--with-colons',
            '--import-options=import-show',
            _in=pgp_private_key,
            _out=gpg_import_stdout_callback,
            _err_to_out=True,
            _tee='out'
        )

        # get the fingerprint of the imported key
        #
        # NOTE: if more than one match, just use the first one...
        gpg_imported_pgp_private_key_fingerprints = re.findall(
            gpg_regex,
            gpg_import_stdout_result.getvalue()
        )
        if len(gpg_imported_pgp_private_key_fingerprints) < 1:
            raise RuntimeError(
                "Error getting PGP fingerprint for PGP key"
                " to sign container image(s) with. See stdout and stderr for more info."
            )
        pgp_private_key_fingerprint = gpg_imported_pgp_private_key_fingerprints[0]

        print(
            "Imported PGP private key to sign artifacts with: "
            f"fingerprint='{pgp_private_key_fingerprint}'"
        )
    except sh.ErrorReturnCode as error:
        raise RuntimeError(
            f"Error importing pgp private key: {error}"
        ) from error

    return pgp_private_key_fingerprint
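
The fpr regex above can be exercised directly against the sample colon-format output quoted in the comment, which makes the capture group explicit (sketch for illustration; the sample lines are copied from the comment in import_pgp_key):

import re

# sample `gpg --with-colons` output taken from the comment in import_pgp_key
sample_gpg_output = (
    "sec:-:3072:1:CF4AC14A3D109637:1601483310:1664555310::-:::scESC::::::23::0:\n"
    "fpr:::::::::DD7208BA0A6359F65B906B29CF4AC14A3D109637:\n"
    "grp:::::::::A483EE079EC1D58A954E3AAF3BCC61EDD7596BF0:\n"
)
gpg_regex = re.compile(r"^fpr:+([^:]+):$", re.MULTILINE)
print(re.findall(gpg_regex, sample_gpg_output))
# -> ['DD7208BA0A6359F65B906B29CF4AC14A3D109637'] (only the fpr line matches; grp does not)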
Example #15
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        pom_file = self.get_value('pom-file')
        artifact_extensions = self.get_value('artifact-extensions')
        artifact_parent_dir = self.get_value('artifact-parent-dir')

        if not os.path.exists(pom_file):
            step_result.success = False
            step_result.message = f'Given pom file does not exist: {pom_file}'
            return step_result

        settings_file = self._generate_maven_settings()
        mvn_output_file_path = self.write_working_file('mvn_test_output.txt')
        try:
            with open(mvn_output_file_path, 'w') as mvn_output_file:
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stdout, mvn_output_file])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stderr, mvn_output_file])

                sh.mvn(  # pylint: disable=no-member
                    'clean',
                    'install',
                    '-f',
                    pom_file,
                    '-s',
                    settings_file,
                    _out=out_callback,
                    _err=err_callback)
        except sh.ErrorReturnCode as error:
            step_result.success = False
            step_result.message = "Package failures. See 'maven-output' report artifacts " \
                f"for details: {error}"
            return step_result
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from 'mvn install'.",
                name='maven-output',
                value=mvn_output_file_path)

        # find the artifacts
        artifact_file_names = []
        artifact_parent_dir_full_path = \
            os.listdir(os.path.join(
                os.path.dirname(os.path.abspath(pom_file)),
                artifact_parent_dir))
        for filename in artifact_parent_dir_full_path:
            if any(filename.endswith(ext) for ext in artifact_extensions):
                artifact_file_names.append(filename)

        # error if we find more than one artifact
        # see https://projects.engineering.redhat.com/browse/NAPSSPO-546
        if len(artifact_file_names) > 1:
            step_result.success = False
            step_result.message = 'pom resulted in multiple artifacts with expected artifact ' \
                                  f'extensions ({artifact_extensions}), this is unsupported'
            return step_result

        if len(artifact_file_names) < 1:
            step_result.success = False
            step_result.message = 'pom resulted in 0 artifacts with expected artifact extensions ' \
                                  f'({artifact_extensions}), this is unsupported'
            return step_result

        artifact_id = get_xml_element(pom_file, 'artifactId').text
        group_id = get_xml_element(pom_file, 'groupId').text
        try:
            package_type = get_xml_element(pom_file, 'package').text
        except ValueError:
            package_type = 'jar'

        package_artifacts = {
            'path': os.path.join(
                os.path.dirname(os.path.abspath(pom_file)),
                artifact_parent_dir,
                artifact_file_names[0]
            ),
            'artifact-id': artifact_id,
            'group-id': group_id,
            'package-type': package_type,
            'pom-path': pom_file
        }

        # Currently, package returns ONE 'artifact', eg: one war file
        # However, in the future, an ARRAY could be returned, eg: several jar files
        step_result.add_artifact(name='package-artifacts',
                                 value=[package_artifacts])

        return step_result
Example #16
    def _run_step(self): # pylint: disable=too-many-locals,too-many-statements
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        settings_file = self._generate_maven_settings()
        pom_file = self.get_value('pom-file')
        fail_on_no_tests = self.get_value('fail-on-no-tests')
        selenium_hub_url = self.get_value('selenium-hub-url')
        deployed_host_urls = ConfigValue.convert_leaves_to_values(
            self.get_value('deployed-host-urls')
        )
        uat_maven_profile = self.get_value('uat-maven-profile')
        tls_verify = self.get_value('tls-verify')

        # NOTE:
        #   at some point may need to do smarter logic if a deployable has more than one deployed
        #   host URL to do UAT against all of them, but for now, use first one as target of UAT
        if isinstance(deployed_host_urls, list):
            target_base_url = deployed_host_urls[0]
            if len(deployed_host_urls) > 1:
                step_result.message = \
                    f"Given more then one deployed host URL ({deployed_host_urls})," \
                    f" targeting first one ({target_base_url}) for user acceptance test (UAT)."
                print(step_result.message)
        elif deployed_host_urls:
            target_base_url = deployed_host_urls
        else:
            target_base_url = self.get_value('target-host-url')


        # ensure surefire plugin enabled
        maven_surefire_plugin = self._get_effective_pom_element(
            element_path=MavenGeneric.SUREFIRE_PLUGIN_XML_ELEMENT_PATH
        )
        if maven_surefire_plugin is None:
            step_result.success = False
            step_result.message = 'Unit test dependency "maven-surefire-plugin" ' \
                f'missing from effective pom ({self._get_effective_pom()}).'
            return step_result

        # get surefire test results dir
        reports_dir = self._get_effective_pom_element(
            element_path=MavenGeneric.SUREFIRE_PLUGIN_REPORTS_DIR_XML_ELEMENT_PATH
        )
        if reports_dir is not None:
            test_results_dir = reports_dir.text
        else:
            test_results_dir = os.path.join(
                os.path.dirname(os.path.abspath(pom_file)),
                MavenGeneric.DEFAULT_SUREFIRE_PLUGIN_REPORTS_DIR
            )

        mvn_additional_options = []
        if not tls_verify:
            mvn_additional_options += [
                '-Dmaven.wagon.http.ssl.insecure=true',
                '-Dmaven.wagon.http.ssl.allowall=true',
                '-Dmaven.wagon.http.ssl.ignore.validity.dates=true',
            ]

        cucumber_html_report_path = os.path.join(self.work_dir_path, 'cucumber.html')
        cucumber_json_report_path = os.path.join(self.work_dir_path, 'cucumber.json')
        mvn_output_file_path = self.write_working_file('mvn_test_output.txt')
        try:
            with open(mvn_output_file_path, 'w') as mvn_output_file:
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                    sys.stdout,
                    mvn_output_file
                ])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                    sys.stderr,
                    mvn_output_file
                ])
                sh.mvn( # pylint: disable=no-member
                    'clean',
                    'test',
                    f'-P{uat_maven_profile}',
                    f'-Dselenium.hub.url={selenium_hub_url}',
                    f'-Dtarget.base.url={target_base_url}',
                    f'-Dcucumber.plugin=' \
                        f'html:{cucumber_html_report_path},' \
                        f'json:{cucumber_json_report_path}',
                    '-f', pom_file,
                    '-s', settings_file,
                    *mvn_additional_options,
                    _out=out_callback,
                    _err=err_callback
                )

            if not os.path.isdir(test_results_dir) or len(os.listdir(test_results_dir)) == 0:
                if fail_on_no_tests:
                    step_result.message = "No user acceptance tests defined" \
                        f" using maven profile ({uat_maven_profile})."
                    step_result.success = False
                else:
                    step_result.message = "No user acceptance tests defined" \
                        f" using maven profile ({uat_maven_profile})," \
                        " but 'fail-on-no-tests' is False."
        except sh.ErrorReturnCode:
            step_result.message = "User acceptance test failures. See 'maven-output'" \
                ", 'surefire-reports', 'cucumber-report-html', and 'cucumber-report-json'" \
                " report artifacts for details."
            step_result.success = False

        step_result.add_artifact(
            description=f"Standard out and standard error by 'mvn -P{uat_maven_profile} test'.",
            name='maven-output',
            value=mvn_output_file_path
        )
        step_result.add_artifact(
            description=f"Surefire reports generated by 'mvn -P{uat_maven_profile} test'.",
            name='surefire-reports',
            value=test_results_dir
        )
        step_result.add_artifact(
            description=f"Cucumber (HTML) report generated by 'mvn -P{uat_maven_profile} test'.",
            name='cucumber-report-html',
            value=cucumber_html_report_path
        )
        step_result.add_artifact(
            description=f"Cucumber (JSON) report generated by 'mvn -P{uat_maven_profile} test'.",
            name='cucumber-report-json',
            value=cucumber_json_report_path
        )

        return step_result
Example #17
    def _run_step(self):  # pylint: disable=too-many-locals
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # Get config items
        maven_push_artifact_repo_id = self.get_value(
            'maven-push-artifact-repo-id')
        maven_push_artifact_repo_url = self.get_value(
            'maven-push-artifact-repo-url')
        version = self.get_value('version')
        package_artifacts = self.get_value('package-artifacts')
        tls_verify = self.get_value('tls-verify')

        # disable tls verification
        mvn_additional_options = []
        if not tls_verify:
            mvn_additional_options += [
                '-Dmaven.wagon.http.ssl.insecure=true',
                '-Dmaven.wagon.http.ssl.allowall=true',
                '-Dmaven.wagon.http.ssl.ignore.validity.dates=true',
            ]

        # Create settings.xml
        settings_file = self._generate_maven_settings()

        # push the artifacts
        push_artifacts = []
        mvn_output_file_path = self.write_working_file('mvn_test_output.txt')
        try:
            for package in package_artifacts:
                artifact_path = package['path']
                group_id = package['group-id']
                artifact_id = package['artifact-id']
                package_type = package['package-type']

                # push the artifact
                with open(mvn_output_file_path, 'a') as mvn_output_file:
                    out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                        [sys.stdout, mvn_output_file])
                    err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                        [sys.stderr, mvn_output_file])
                    sh.mvn(  # pylint: disable=no-member
                        'deploy:deploy-file',
                        '-Dversion=' + version,
                        '-Dfile=' + artifact_path,
                        '-DgroupId=' + group_id,
                        '-DartifactId=' + artifact_id,
                        '-Dpackaging=' + package_type,
                        '-Durl=' + maven_push_artifact_repo_url,
                        '-DrepositoryId=' + maven_push_artifact_repo_id,
                        '-s' + settings_file,
                        *mvn_additional_options,
                        _out=out_callback,
                        _err=err_callback)

                # record the pushed artifact
                push_artifacts.append({
                    'artifact-id': artifact_id,
                    'group-id': group_id,
                    'version': version,
                    'path': artifact_path,
                    'packaging': package_type,
                })
        except sh.ErrorReturnCode as error:
            step_result.success = False
            step_result.message = "Push artifacts failures. See 'maven-output' report artifacts " \
                f"for details: {error}"

        step_result.add_artifact(
            description="Standard out and standard error from 'mvn install'.",
            name='maven-output',
            value=mvn_output_file_path)
        step_result.add_artifact(name='push-artifacts', value=push_artifacts)
        return step_result
Example #18
    def __run_oscap_scan(  # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
            buildah_unshare_command,
            oscap_eval_type,
            oscap_input_file,
            oscap_out_file_path,
            oscap_xml_results_file_path,
            oscap_html_report_path,
            container_mount_path,
            oscap_profile=None,
            oscap_tailoring_file=None,
            oscap_fetch_remote_resources=True):
        """Run an oscap scan in the context of a buildah unshare to run "rootless".

        Parameters
        ----------
        buildah_unshare_command : sh.buildah.unshare.bake()
            A baked sh.buildah.unshare command to run this command in the context of,
            so that this can be done "rootless".
        oscap_eval_type : str
            The type of oscap eval to perform. Must be a valid oscap eval type.
            EX: xccdf, oval
        oscap_input_file : str
            Path to rules file passed to the oscap command.
        oscap_out_file_path : str
            Path to write the stdout and stderr of running the oscap command to.
        oscap_xml_results_file_path : str
            Write the scan results into this file.
        oscap_html_report_path : str
            Write the human readable (HTML) report into this file.
        container_mount_path : str
            Path to the mounted container to scan.
        oscap_tailoring_file : str
            XCCF Tailoring file.
            See:
            - https://www.open-scap.org/security-policies/customization/
            - https://www.open-scap.org/resources/documentation/customizing-scap-security-guide-for-your-use-case/ # pylint: disable=line-too-long
            - https://static.open-scap.org/openscap-1.2/oscap_user_manual.html#_how_to_tailor_source_data_stream # pylint: disable=line-too-long
        oscap_profile : str
            OpenSCAP profile to evaluate. Must be a valid profile in the given oscap_input_file.
            EX: if you perform an `oscap info oscap_input_file` the profile must be listed.
        oscap_fetch_remote_resources : bool
            True to have oscap fetch remote resources referenced by the scan content.
            False to skip fetching remote resources.

        Returns
        -------
        oscap_eval_success : bool
            True if oscap eval passed all rules
            False if oscap eval failed any rules
        oscap_eval_fails : str
            If oscap_eval_success is True then indeterminate.
            If oscap_eval_success is False then string of all of the failed rules.

        Raises
        ------
        StepRunnerException
            If unexpected error running oscap scan.
        """

        oscap_profile_flag = None
        if oscap_profile is not None:
            oscap_profile_flag = f"--profile={oscap_profile}"

        oscap_fetch_remote_resources_flag = None
        if isinstance(oscap_fetch_remote_resources, str):
            oscap_fetch_remote_resources = strtobool(
                oscap_fetch_remote_resources)
        if oscap_fetch_remote_resources:
            oscap_fetch_remote_resources_flag = "--fetch-remote-resources"

        oscap_tailoring_file_flag = None
        if oscap_tailoring_file is not None:
            oscap_tailoring_file_flag = f"--tailoring-file={oscap_tailoring_file}"

        oscap_eval_success = None
        oscap_eval_out_buff = StringIO()
        oscap_eval_out = ""
        oscap_eval_fails = None
        try:
            oscap_chroot_command = buildah_unshare_command.bake("oscap-chroot")
            with open(oscap_out_file_path, 'w') as oscap_out_file:
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [oscap_eval_out_buff, oscap_out_file])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [oscap_eval_out_buff, oscap_out_file])
                oscap_chroot_command(
                    container_mount_path,
                    oscap_eval_type,
                    'eval',
                    oscap_profile_flag,
                    oscap_fetch_remote_resources_flag,
                    oscap_tailoring_file_flag,
                    f'--results={oscap_xml_results_file_path}',
                    f'--report={oscap_html_report_path}',
                    oscap_input_file,
                    _out=out_callback,
                    _err=err_callback,
                    _tee='err')
                oscap_eval_success = True
        except sh.ErrorReturnCode_1 as error:  # pylint: disable=no-member
            oscap_eval_success = error
        except sh.ErrorReturnCode_2 as error:  # pylint: disable=no-member
            # XCCDF: If there is at least one rule with either fail or unknown result,
            #           oscap-scan finishes with return code 2.
            # OVAL:  Never returned
            #
            # Source: https://www.systutorials.com/docs/linux/man/8-oscap/
            if oscap_eval_type == 'xccdf':
                oscap_eval_success = False
            else:
                oscap_eval_success = error
        except sh.ErrorReturnCode as error:
            oscap_eval_success = error

        # get the oscap output
        oscap_eval_out = oscap_eval_out_buff.getvalue()

        # parse the oscap output
        # NOTE: oscap puts carriage returns (\r / ^M) in its output; remove them
        oscap_eval_out = re.sub('\r', '', oscap_eval_out)

        # print the oscap output no matter the results
        print(oscap_eval_out)

        # if unexpected error throw error
        if isinstance(oscap_eval_success, Exception):
            raise StepRunnerException(
                f"Error running 'oscap {oscap_eval_type} eval': {oscap_eval_success} "
            ) from oscap_eval_success

        # NOTE: oscap oval eval returns exit code 0 whether or not any rules failed
        #       need to search output to determine if there were any rule failures
        if oscap_eval_type == 'oval' and oscap_eval_success:
            oscap_eval_fails = ""
            for match in OpenSCAPGeneric.OSCAP_OVAL_STDOUT_PATTERN.finditer(
                    oscap_eval_out):
                # NOTE: need to do regex and not == because may contain xterm color chars
                if OpenSCAPGeneric.OSCAP_OVAL_STDOUT_FAIL_PATTERN.search(
                        match.groupdict()['ruleresult']):
                    oscap_eval_fails += match.groupdict()['ruleblock']
                    oscap_eval_fails += "\n"
                    oscap_eval_success = False

        # if failed xccdf eval then parse out the fails
        if oscap_eval_type == 'xccdf' and not oscap_eval_success:
            oscap_eval_fails = ""
            for match in OpenSCAPGeneric.OSCAP_XCCDF_STDOUT_PATTERN.finditer(
                    oscap_eval_out):
                # NOTE: need to do regex and not == because may contain xterm color chars
                if re.search(r'fail', match.groupdict()['ruleresult']):
                    oscap_eval_fails += "\n"
                    oscap_eval_fails += match.groupdict()['ruleblock']
                    oscap_eval_fails += "\n"

        return oscap_eval_success, oscap_eval_fails
Example #19
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        pom_file = self.get_value('pom-file')
        fail_on_no_tests = self.get_value('fail-on-no-tests')

        # ensure surefire plugin enabled
        maven_surefire_plugin = self._get_effective_pom_element(
            element_path=MavenGeneric.SUREFIRE_PLUGIN_XML_ELEMENT_PATH
        )
        if maven_surefire_plugin is None:
            step_result.success = False
            step_result.message = 'Unit test dependency "maven-surefire-plugin" ' \
                f'missing from effective pom ({self._get_effective_pom()}).'
            return step_result

        # get surefire test results dir
        reports_dir = self._get_effective_pom_element(
            element_path=MavenGeneric.SUREFIRE_PLUGIN_REPORTS_DIR_XML_ELEMENT_PATH
        )
        if reports_dir is not None:
            test_results_dir = reports_dir.text
        else:
            test_results_dir = os.path.join(
                os.path.dirname(os.path.abspath(pom_file)),
                MavenGeneric.DEFAULT_SUREFIRE_PLUGIN_REPORTS_DIR
            )

        settings_file = self._generate_maven_settings()
        mvn_output_file_path = self.write_working_file('mvn_test_output.txt')
        try:
            with open(mvn_output_file_path, 'w') as mvn_output_file:
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                    sys.stdout,
                    mvn_output_file
                ])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                    sys.stderr,
                    mvn_output_file
                ])

                sh.mvn( # pylint: disable=no-member
                    'clean',
                    'test',
                    '-f', pom_file,
                    '-s', settings_file,
                    _out=out_callback,
                    _err=err_callback
                )

            if not os.path.isdir(test_results_dir) or len(os.listdir(test_results_dir)) == 0:
                if fail_on_no_tests:
                    step_result.message = 'No unit tests defined.'
                    step_result.success = False
                else:
                    step_result.message = "No unit tests defined, but 'fail-on-no-tests' is False."
        except sh.ErrorReturnCode as error:
            step_result.message = "Unit test failures. See 'maven-output'" \
                f" and 'surefire-reports' report artifacts for details: {error}"
            step_result.success = False
        finally:
            step_result.add_artifact(
                description="Standard out and standard error from 'mvn test'.",
                name='maven-output',
                value=mvn_output_file_path
            )
            step_result.add_artifact(
                description="Surefire reports generated from 'mvn test'.",
                name='surefire-reports',
                value=test_results_dir
            )

        return step_result
Example #20
    def _run_step(self):
        """Runs the step implemented by this StepImplementer.

        Returns
        -------
        StepResult
            Object containing the dictionary results of this step.
        """
        step_result = StepResult.from_step_implementer(self)

        # configlint-yml-path is required
        configlint_yml_path = self.get_value('configlint-yml-path')

        if not os.path.exists(configlint_yml_path):
            step_result.success = False
            step_result.message = 'File specified in ' \
                                  f'configlint-yml-path not found: {configlint_yml_path}'
            return step_result

        # Required: rules and exists
        rules_file = self.get_value('rules')
        if not os.path.exists(rules_file):
            step_result.success = False
            step_result.message = f'File specified in rules not found: {rules_file}'
            return step_result

        configlint_results_file_path = self.write_working_file('configlint_results_file.txt')
        try:
            # run config-lint writing stdout and stderr to the standard streams
            # as well as to a results file.
            with open(configlint_results_file_path, 'w') as configlint_results_file:
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                    sys.stdout,
                    configlint_results_file
                ])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                    sys.stderr,
                    configlint_results_file
                ])

                sh.config_lint(  # pylint: disable=no-member
                    "-verbose",
                    "-debug",
                    "-rules",
                    rules_file,
                    configlint_yml_path,
                    _encoding='UTF-8',
                    _out=out_callback,
                    _err=err_callback,
                    _tee='err'
                )
        except sh.ErrorReturnCode_255:  # pylint: disable=no-member
            # NOTE: expected failure condition,
            #       aka, the config lint run, but found an issue
            #       stderr/stdout is captured in configlint_results_file_path
            step_result.success = False
            step_result.message = 'Failed config-lint scan.'
        except sh.ErrorReturnCode:
            # NOTE: un-expected failure condition
            #       aka, the config lint failed to run for some reason
            #       stderr/stdout is captured in configlint_results_file_path
            step_result.success = False
            step_result.message = 'Unexpected Error invoking config-lint.'

        step_result.add_artifact(
            name='configlint-result-set',
            value=configlint_results_file_path
        )
        step_result.add_artifact(
            name='configlint-yml-path',
            value=configlint_yml_path
        )
        return step_result
Example #21
    def _argocd_app_sync(
            argocd_app_name,
            argocd_sync_timeout_seconds,
            argocd_sync_retry_limit,
            argocd_sync_prune=True
    ):  # pylint: disable=line-too-long
        # add any additional flags
        argocd_sync_additional_flags = []
        if argocd_sync_prune:
            argocd_sync_additional_flags.append('--prune')

        for wait_for_op_retry in range(
                ArgoCDGeneric.MAX_ATTEMPT_TO_WAIT_FOR_ARGOCD_OP_RETRIES):
            # wait for any existing operations before requesting new synchronization
            #
            # NOTE: attempted work around for 'FailedPrecondition desc = another operation is
            #       already in progress' error
            # SEE: https://github.com/argoproj/argo-cd/issues/4505
            ArgoCDGeneric._argocd_app_wait_for_operation(
                argocd_app_name=argocd_app_name,
                argocd_timeout_seconds=argocd_sync_timeout_seconds)

            # sync app
            argocd_output_buff = StringIO()
            try:
                print(
                    f"Request synchronization of ArgoCD app ({argocd_app_name})."
                )
                out_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stdout, argocd_output_buff])
                err_callback = create_sh_redirect_to_multiple_streams_fn_callback(
                    [sys.stderr, argocd_output_buff])

                sh.argocd.app.sync(  # pylint: disable=no-member
                    *argocd_sync_additional_flags,
                    '--timeout',
                    argocd_sync_timeout_seconds,
                    '--retry-limit',
                    argocd_sync_retry_limit,
                    argocd_app_name,
                    _out=out_callback,
                    _err=err_callback)

                break
            except sh.ErrorReturnCode as error:
                # if error syncing because of in progress op
                # try again to wait for in progress op and do sync again
                #
                # NOTE: this can happen if we do the wait for op, and then an op starts and then
                #       we try to do a sync
                #
                # SEE: https://github.com/argoproj/argo-cd/issues/4505
                if re.match(ArgoCDGeneric.ARGOCD_OP_IN_PROGRESS_REGEX,
                            argocd_output_buff.getvalue()):
                    print(
                        f"ArgoCD Application ({argocd_app_name}) has an existing operation"
                        " that started after we already waited for existing operations but"
                        " before we tried to do a sync."
                        f" Try ({wait_for_op_retry} out of"
                        f" {ArgoCDGeneric.MAX_ATTEMPT_TO_WAIT_FOR_ARGOCD_OP_RETRIES}) again to"
                        " wait for the operation")
                    continue

                if not argocd_sync_prune:
                    prune_warning = ". Sync 'prune' option is disabled." \
                        " If sync error (see logs) was due to resource(s) that need to be pruned," \
                        " and the pruneable resources are intentionally there then see the ArgoCD" \
                        " documentation for instructions for argo to ignore the resource(s)." \
                        " See: https://argoproj.github.io/argo-cd/user-guide/sync-options/#no-prune-resources" \
                        " and https://argoproj.github.io/argo-cd/user-guide/compare-options/#ignoring-resources-that-are-extraneous"
                else:
                    prune_warning = ""

                raise StepRunnerException(
                    f"Error synchronization ArgoCD Application ({argocd_app_name})"
                    f"{prune_warning}: {error}") from error

        # wait for sync to finish
        ArgoCDGeneric._argocd_app_wait_for_health(
            argocd_app_name=argocd_app_name,
            argocd_timeout_seconds=argocd_sync_timeout_seconds)
Example #22
def run_maven( #pylint: disable=too-many-arguments
    mvn_output_file_path,
    settings_file,
    pom_file,
    phases_and_goals,
    tls_verify=True,
    additional_arguments=None,
    profiles=None,
    no_transfer_progress=True
):
    """Runs maven using the given configuration.

    Parameters
    ----------
    mvn_output_file_path : str
        Path to file containing the maven stdout and stderr output.
    phases_and_goals : [str]
        List of maven phases and/or goals to execute.
    additional_arguments : [str]
        List of additional arguments to use.
    pom_file : str (path)
        pom used when executing maven.
    tls_verify : boolean
        Disables TLS Verification if set to False
    profiles : [str]
        List of maven profiles to use.
    no_transfer_progress : boolean
        `True` to suppress the transfer progress of packages maven downloads.
        `False` to have the transfer progress printed.
        See https://maven.apache.org/ref/current/maven-embedder/cli.html
    settings_file : str (path)
        Maven settings file to use.

    Returns
    -------
    str
        Standard Out and Standard Error from running Maven.

    Raises
    ------
    StepRunnerException
        If maven returns a non-zero exit code.
    """

    if not isinstance(phases_and_goals, list):
        phases_and_goals = [phases_and_goals]

    # create profile argument
    profiles_arguments = ""
    if profiles:
        profiles_arguments = ['-P', f"{','.join(profiles)}"]

    # create no transfer progress argument
    no_transfer_progress_argument = None
    if no_transfer_progress:
        no_transfer_progress_argument = '--no-transfer-progress'

    # create tls arguments
    tls_arguments = []
    if not tls_verify:
        tls_arguments += [
            '-Dmaven.wagon.http.ssl.insecure=true',
            '-Dmaven.wagon.http.ssl.allowall=true',
            '-Dmaven.wagon.http.ssl.ignore.validity.dates=true',
        ]

    if not additional_arguments:
        additional_arguments = []

    # run maven
    try:
        with open(mvn_output_file_path, 'w', encoding='utf-8') as mvn_output_file:
            out_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                sys.stdout,
                mvn_output_file
            ])
            err_callback = create_sh_redirect_to_multiple_streams_fn_callback([
                sys.stderr,
                mvn_output_file
            ])

            sh.mvn( # pylint: disable=no-member
                *phases_and_goals,
                '-f', pom_file,
                '-s', settings_file,
                *profiles_arguments,
                no_transfer_progress_argument,
                *tls_arguments,
                *additional_arguments,
                _out=out_callback,
                _err=err_callback
            )
    except sh.ErrorReturnCode as error:
        raise StepRunnerException(
            f"Error running maven. {error}"
        ) from error
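
A hypothetical invocation of run_maven; the paths, phases, and profile below are illustrative and not taken from this example:

# illustrative only: run 'mvn clean install' against an assumed pom and settings file
run_maven(
    mvn_output_file_path='working/mvn_output.txt',  # assumed path
    settings_file='working/settings.xml',           # assumed path
    pom_file='pom.xml',
    phases_and_goals=['clean', 'install'],
    profiles=['integration-test']                   # assumed profile name
)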