Example #1
# Requires flexmock (from flexmock import flexmock) plus the OSBSLogs and
# LogEntry names from the code under test (osbs-client / atomic-reactor).
def test_osbs_logs_get_log_files(tmpdir):
    """Check that get_log_files() returns the expected Koji metadata for each log file."""
    class OSBS(object):
        # Minimal stand-in for the real OSBS client; only the method used by
        # get_log_files() is implemented.
        def get_orchestrator_build_logs(self, build_id):
            logs = [
                LogEntry(None, 'orchestrator'),
                LogEntry('x86_64', 'Hurray for bacon: \u2017'),
                LogEntry('x86_64', 'line 2')
            ]
            return logs

    # Expected per-file Koji metadata, keyed by log file name.
    metadata = {
        'x86_64.log': {
            'checksum': 'c2487bf0142ea344df8b36990b0186be',
            'checksum_type': 'md5',
            'filename': 'x86_64.log',
            'filesize': 29
        },
        'orchestrator.log': {
            'checksum': 'ac9ed4cc35a9a77264ca3a0fb81be117',
            'checksum_type': 'md5',
            'filename': 'orchestrator.log',
            'filesize': 13
        }
    }

    logger = flexmock()
    flexmock(logger).should_receive('error')
    osbs_logs = OSBSLogs(logger)
    osbs = OSBS()
    output = osbs_logs.get_log_files(osbs, 1)
    for entry in output:
        # Each entry is a (file, metadata) pair; compare the metadata half
        # against the expected values keyed by file name.
        assert entry[1] == metadata[entry[1]['filename']]
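
For context, a minimal sketch of the data shapes this test relies on, reconstructed from the snippet itself (the real LogEntry and output types live in osbs-client / atomic-reactor and may differ):

from collections import namedtuple

# One log line tagged with the platform it belongs to (None for the
# orchestrator log in the fake OSBS above).
LogEntry = namedtuple('LogEntry', ['platform', 'line'])

# get_log_files() is expected to yield (file, metadata) pairs; the metadata
# half carries the Koji file info (filename, checksum, checksum_type,
# filesize) that the assertions above check.
Output = namedtuple('Output', ['file', 'metadata'])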
Example #2
    def combine_metadata_fragments(self):
        """Construct the Koji CG metadata and collect the output files for upload later."""
        def add_buildroot_id(output, buildroot_id):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        worker_metadatas = self.workflow.postbuild_results.get(PLUGIN_FETCH_WORKER_METADATA_KEY)
        build = self.get_build(metadata, worker_metadatas)
        buildroot = self.get_buildroot(worker_metadatas)
        buildroot_id = buildroot[0]['id']
        output, output_file = self.get_output(worker_metadatas, buildroot_id)
        osbs_logs = OSBSLogs(self.log)
        output_files = [add_log_type(add_buildroot_id(md, buildroot_id))
                        for md in osbs_logs.get_log_files(self.osbs, self.build_id)]

        output.extend([of.metadata for of in output_files])
        if output_file:
            output_files.append(output_file)

        # add remote source tarball and remote-source.json files to output
        for remote_source_output in [
            get_source_tarball_output(self.workflow),
            get_remote_source_json_output(self.workflow)
        ]:
            if remote_source_output:
                add_custom_type(remote_source_output, KOJI_BTYPE_REMOTE_SOURCES)
                remote_source = add_buildroot_id(remote_source_output, buildroot_id)
                output_files.append(remote_source)
                output.append(remote_source.metadata)

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': buildroot,
            'output': output,
        }
        return koji_metadata, output_files
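
The add_custom_type helper used above is not shown in these excerpts. A minimal sketch of what it is assumed to do, based on how it is called here (the real atomic-reactor helper may set additional fields):

def add_custom_type(output, custom_type, content=None):
    # Sketch: mark an output with a non-standard Koji build type so the
    # content-generator import classifies the file correctly.
    output.metadata.update({
        'type': custom_type,
        'extra': {
            'typeinfo': {
                custom_type: content or {},
            },
        },
    })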
Example #3
    def get_output(self, buildroot_id: str) -> List[Dict[str, Any]]:
        # Both binary and source builds have log files.
        outputs: List[Dict[str, Any]] = []
        koji_upload_files = self.workflow.data.koji_upload_files
        osbs_logs = OSBSLogs(self.log, get_platforms(self.workflow.data))
        log_files_outputs = osbs_logs.get_log_files(
            self.workflow.osbs, self.workflow.pipeline_run_name)
        for output in log_files_outputs:
            metadata = output.metadata
            metadata['buildroot_id'] = buildroot_id
            outputs.append(metadata)
            koji_upload_files.append({
                "local_filename": output.filename,
                "dest_filename": metadata["filename"],
            })
        return outputs
    def combine_metadata_fragments(self):
        def add_buildroot_id(output, buildroot_id):
            logfile, metadata = output
            metadata.update({'buildroot_id': buildroot_id})
            return Output(file=logfile, metadata=metadata)

        def add_log_type(output):
            logfile, metadata = output
            metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(file=logfile, metadata=metadata)

        try:
            metadata = get_build_json()["metadata"]
            self.build_id = metadata["name"]
        except KeyError:
            self.log.error("No build metadata")
            raise

        metadata_version = 0

        worker_metadatas = self.workflow.postbuild_results.get(
            PLUGIN_FETCH_WORKER_METADATA_KEY)
        build = self.get_build(metadata, worker_metadatas)
        buildroot = self.get_buildroot(worker_metadatas)
        buildroot_id = buildroot[0]['id']
        output = self.get_output(worker_metadatas)
        osbs_logs = OSBSLogs(self.log)
        output_files = [
            add_log_type(add_buildroot_id(md, buildroot_id))
            for md in osbs_logs.get_log_files(self.osbs, self.build_id)
        ]
        output.extend([of.metadata for of in output_files])

        koji_metadata = {
            'metadata_version': metadata_version,
            'build': build,
            'buildroots': buildroot,
            'output': output,
        }
        return koji_metadata, output_files
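
Note that the older variants above unpack each log output positionally (logfile, metadata = output), while the newer get_output in this example and the code in Example #7 access named attributes instead. A sketch of the two shapes, assuming namedtuples (the real definitions may differ):

from collections import namedtuple

# Older shape, unpacked as (file, metadata) in combine_metadata_fragments.
OutputOld = namedtuple('Output', ['file', 'metadata'])

# Newer shape, accessed as output.filename / output.metadata.
OutputNew = namedtuple('Output', ['filename', 'metadata'])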
Example #6
    def _fetch_log_files(self):
        """Fetch the log files for the current build via the OSBS client."""
        osbs = get_openshift_session(self.workflow, self.openshift_fallback)
        build_id = get_build_json()['metadata']['name'] or {}
        osbs_logs = OSBSLogs(self.log)

        return osbs_logs.get_log_files(osbs, build_id)
Example #7
    def combine_metadata_fragments(self) -> Dict[str, Any]:
        """Construct the CG metadata and collect the output files for upload later."""
        def add_buildroot_id(output: Output, buildroot_id: str) -> Output:
            output.metadata.update({'buildroot_id': buildroot_id})
            return Output(filename=output.filename, metadata=output.metadata)

        def add_log_type(output: Output) -> Output:
            output.metadata.update({'type': 'log', 'arch': 'noarch'})
            return Output(filename=output.filename, metadata=output.metadata)

        build = self.get_build()
        buildroot = self.get_buildroot()
        buildroot_id = buildroot[0]['id']

        # Collect the output files, which will be uploaded later.
        koji_upload_files = self.workflow.data.koji_upload_files

        output: List[Dict[str, Any]]  # list of per-output metadata dicts
        # The corresponding output file; only the source build has one.
        output_file: Optional[Output]

        output, output_file = self.get_output(buildroot_id)
        if output_file:
            koji_upload_files.append({
                "local_filename": output_file.filename,
                "dest_filename": output[0]["filename"],
            })

        # Collect log files
        osbs_logs = OSBSLogs(self.log, get_platforms(self.workflow.data))
        log_files_output = [
            add_log_type(add_buildroot_id(md, buildroot_id)) for md in
            osbs_logs.get_log_files(self.osbs, self.workflow.pipeline_run_name)
        ]
        for log_file_output in log_files_output:
            output.append(log_file_output.metadata)
            koji_upload_files.append({
                "local_filename": log_file_output.filename,
                "dest_filename": log_file_output.metadata["filename"],
            })

        remote_source_file_outputs, kojifile_components = get_maven_metadata(
            self.workflow.data)

        # add maven components alongside RPM components
        for metadata in output:
            if metadata['type'] == 'docker-image':
                metadata['components'] += kojifile_components

        # add remote sources tarballs and remote sources json files to output
        for remote_source_output in [
                *get_source_tarballs_output(self.workflow),
                *get_remote_sources_json_output(self.workflow)
        ]:
            add_custom_type(remote_source_output, KOJI_BTYPE_REMOTE_SOURCES)
            remote_source = add_buildroot_id(remote_source_output,
                                             buildroot_id)
            output.append(remote_source.metadata)
            koji_upload_files.append({
                "local_filename": remote_source.filename,
                "dest_filename": remote_source.metadata["filename"],
            })

        for remote_source_file_output in remote_source_file_outputs:
            remote_source_file = add_buildroot_id(remote_source_file_output,
                                                  buildroot_id)
            output.append(remote_source_file.metadata)
            koji_upload_files.append({
                "local_filename": remote_source_file_output.filename,
                "dest_filename": remote_source_file_output.metadata["filename"],
            })

        koji_metadata = {
            'metadata_version': 0,
            'build': build,
            'buildroots': buildroot,
            'output': output,
        }
        return koji_metadata
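
To make the bookkeeping above concrete: each log file contributes a metadata dict to output and an upload instruction to koji_upload_files. A purely illustrative example of one such pair, reusing the values from Example #1 (the local path and buildroot id are hypothetical):

log_metadata = {
    'filename': 'x86_64.log',
    'filesize': 29,
    'checksum': 'c2487bf0142ea344df8b36990b0186be',
    'checksum_type': 'md5',
    'type': 'log',      # added by add_log_type()
    'arch': 'noarch',   # added by add_log_type()
    'buildroot_id': 1,  # added by add_buildroot_id(); hypothetical id
}
upload_entry = {
    'local_filename': '/tmp/x86_64.log',  # hypothetical local path
    'dest_filename': 'x86_64.log',
}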