def __init__(self, id, method, params, session, options, workdir=None):
    # pylint: disable=redefined-builtin
    BaseTaskHandler.__init__(self, id, method, params, session, options,
                             workdir)
    self._osbs = None
    self.demux = None
    self._log_handler_added = False
    self.incremental_log_basename = 'openshift-incremental.log'
    self._user_warnings = UserWarningsStore()
Example #2
def test_store_user_warnings(logs, expected, wrong_input, caplog):
    user_warnings = UserWarningsStore()

    for line in logs:
        if user_warnings.is_user_warning(line):
            user_warnings.store(line)

    if wrong_input:
        for input_ in wrong_input:
            message = 'Incorrect JSON data input for a user warning: {}'
            assert message.format(input_) in caplog.text

    assert sorted(user_warnings) == sorted(expected)

    user_warnings = str(user_warnings).splitlines()
    assert sorted(user_warnings) == sorted(expected)
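
The examples on this page rely on only a handful of UserWarningsStore operations: is_user_warning() to detect warning lines, store() to parse and keep them, iteration and truthiness over the stored warnings, and str() to render them one per line. Below is a minimal sketch of that interface; the marker string and JSON payload layout are assumptions for illustration, not the real osbs-client implementation.

# Minimal sketch of the UserWarningsStore interface exercised by these examples.
# The marker and JSON schema are assumptions, not the real osbs-client code.
import json
import logging

logger = logging.getLogger(__name__)


class UserWarningsStore(object):
    # hypothetical marker that flags a user-warning log line
    MARKER = 'USER_WARNING'

    def __init__(self):
        self._warnings = set()

    def is_user_warning(self, line):
        return self.MARKER in line

    def store(self, line):
        # the text after the marker is expected to be JSON with a 'message' key
        _, _, payload = line.partition(self.MARKER)
        try:
            self._warnings.add(json.loads(payload)['message'])
        except (ValueError, KeyError):
            logger.error('Incorrect JSON data input for a user warning: %s',
                         payload.strip())

    def __iter__(self):
        return iter(self._warnings)

    def __bool__(self):
        return bool(self._warnings)

    def __str__(self):
        # one stored warning per line, as the examples expect
        return '\n'.join(self._warnings)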
Example #3
def cmd_build_logs(args, osbs):
    build_id = args.BUILD_ID[0]
    follow = args.follow

    user_warnings = UserWarningsStore()
    logs = osbs.get_build_logs(build_id, follow=follow,
                               wait_if_missing=args.wait_if_missing,
                               decode=True)

    for line in logs:
        if user_warnings.is_user_warning(line):
            user_warnings.store(line)

        if follow:
            print(line)

    if not follow:
        print(logs, end="\n")

    if user_warnings:
        print("USER WARNINGS")
        print(user_warnings)
Example #4
def _print_build_logs(args, osbs, build):
    user_warnings = UserWarningsStore()
    build_id = build.get_build_name()

    # we need to wait for kubelet to schedule the build, otherwise it's 500
    build = osbs.wait_for_build_to_get_scheduled(build_id)
    if not args.no_logs:
        build_logs = osbs.get_build_logs(build_id, follow=True, decode=True)
        if not isinstance(build_logs, collections.abc.Iterable):
            logger.error("'%s' is not iterable; can't display logs", build_logs)
            return
        print("Build submitted (%s), watching logs (feel free to interrupt)" % build_id)
        try:
            for line in build_logs:
                if user_warnings.is_user_warning(line):
                    user_warnings.store(line)
                    continue

                print('{!r}'.format(line))
        except Exception as ex:
            logger.error("Error during fetching logs for build %s: %s", build_id, repr(ex))

        osbs.wait_for_build_to_finish(build_id)

        if user_warnings:
            print("USER WARNINGS")
            print(user_warnings)

        return _display_build_summary(osbs.get_build(build_id))
    else:
        if args.output == 'json':
            print_json_nicely(build.json)
        elif args.output == 'text':
            print(build_id)

        if osbs.get_build(build_id).is_succeeded():
            return 0
        else:
            return -1
Example #5
def print_output(pipeline_run, export_metadata_file=None):
    user_warnings_store = UserWarningsStore()
    get_logs_passed = _print_pipeline_run_logs(pipeline_run,
                                               user_warnings_store)
    pipeline_run.wait_for_finish()
    build_metadata = _get_build_metadata(pipeline_run, user_warnings_store)
    _display_pipeline_run_summary(build_metadata)

    if export_metadata_file:
        with open(export_metadata_file, "w") as f:
            json.dump(build_metadata, f)

    if not get_logs_passed and pipeline_run.has_not_finished():
        pipeline_run_name = pipeline_run.pipeline_run_name
        try:
            logger.debug("Will try to cancel pipeline run: %s",
                         pipeline_run_name)
            pipeline_run.cancel_pipeline_run()
        except Exception as ex:
            logger.error("Error during canceling pipeline run %s: %s",
                         pipeline_run_name, repr(ex))
class BaseContainerTask(BaseTaskHandler):
    """Common class for BuildContainerTask and BuildSourceContainerTask"""
    def __init__(self, id, method, params, session, options, workdir=None):
        # pylint: disable=redefined-builtin
        BaseTaskHandler.__init__(self, id, method, params, session, options,
                                 workdir)
        self._osbs = None
        self.demux = None
        self._log_handler_added = False
        self.incremental_log_basename = 'openshift-incremental.log'
        self._user_warnings = UserWarningsStore()

    def osbs(self):
        """Handler of OSBS object"""
        if not self._osbs:
            os_conf = Configuration()
            build_conf = Configuration()
            if self.opts.get('scratch'):
                os_conf = Configuration(conf_section='scratch')
                build_conf = Configuration(conf_section='scratch')
            self._osbs = OSBS(os_conf, build_conf)
            if not self._osbs:
                msg = 'Could not successfully instantiate `osbs`'
                raise ContainerError(msg)
            self.setup_osbs_logging()

        return self._osbs

    def setup_osbs_logging(self):
        # Adding the handler more than once would duplicate log lines,
        # and the handler persists in the forked child process.
        if not self._log_handler_added:
            osbs_logger = logging.getLogger(osbs.__name__)
            osbs_logger.setLevel(logging.INFO)
            log_file = os.path.join(self.resultdir(), 'osbs-client.log')
            handler = logging.FileHandler(filename=log_file)
            # the PID is useful because a buildContainer task forks the main process
            formatter = logging.Formatter(
                '%(asctime)s - %(process)d - %(name)s - %(levelname)s - %(message)s'
            )
            handler.setFormatter(formatter)
            osbs_logger.addHandler(handler)

            self._log_handler_added = True

    def getUploadPath(self):
        """Get the path that should be used when uploading files to
        the hub."""
        return koji.pathinfo.taskrelpath(self.id)

    def resultdir(self):
        path = os.path.join(self.workdir, 'osbslogs')
        if not os.path.exists(path):
            os.makedirs(path)
        return path

    def _incremental_upload_logs(self, child_pid=None):
        resultdir = self.resultdir()
        uploadpath = self.getUploadPath()
        watcher = FileWatcher(resultdir, logger=self.logger)
        finished = False
        try:
            while not finished:
                if child_pid is None:
                    finished = True
                else:
                    time.sleep(1)
                    # WNOHANG: waitpid returns (0, 0) while the child is still running
                    status = os.waitpid(child_pid, os.WNOHANG)
                    if status[0] != 0:
                        finished = True

                for result in watcher.files_to_upload():
                    if result is False:
                        return
                    (fd, fname) = result
                    incremental_upload(self.session,
                                       fname,
                                       fd,
                                       uploadpath,
                                       logger=self.logger)
        finally:
            watcher.clean()

    def _write_combined_log(self, build_id, logs_dir):
        log_filename = os.path.join(logs_dir, self.incremental_log_basename)

        self.logger.info("Will write follow log: %s",
                         self.incremental_log_basename)
        try:
            log = self.osbs().get_build_logs(build_id, follow=True)
        except Exception as error:
            msg = "Exception while waiting for build logs: %s" % error
            raise ContainerError(msg)
        with open(log_filename, 'wb') as outfile:
            try:
                for line in log:
                    outfile.write(("%s\n" % line).encode('utf-8'))
                    outfile.flush()
            except Exception as error:
                msg = "Exception (%s) while writing build logs: %s" % (
                    type(error), error)
                raise ContainerError(msg)

        self.logger.info("%s written", self.incremental_log_basename)

    def _write_demultiplexed_logs(self, build_id, logs_dir):
        self.logger.info("Will write demuxed logs in: %s/", logs_dir)
        try:
            logs = self.osbs().get_orchestrator_build_logs(build_id,
                                                           follow=True)
        except Exception as error:
            msg = "Exception while waiting for orchestrator build logs: %s" % error
            raise ContainerError(msg)
        platform_logs = {}
        for entry in logs:
            platform, line = entry.platform, entry.line
            if platform == METADATA_TAG:
                meta_file = line
                source_file = os.path.join(koji.pathinfo.work(), meta_file)
                uploadpath = os.path.join(logs_dir,
                                          os.path.basename(meta_file))
                shutil.copy(source_file, uploadpath)
                continue

            if self._user_warnings.is_user_warning(line):
                self._user_warnings.store(line)
                continue

            if platform not in platform_logs:
                prefix = 'orchestrator' if platform is None else platform
                log_filename = os.path.join(logs_dir, "%s.log" % prefix)
                platform_logs[platform] = open(log_filename, 'wb')
            try:
                platform_logs[platform].write((line + '\n').encode('utf-8'))
                platform_logs[platform].flush()
            except Exception as error:
                msg = "Exception ({}) while writing build logs: {}".format(
                    type(error), error)
                raise ContainerError(msg)

        if self._user_warnings:
            try:
                log_filename = os.path.join(logs_dir, "user_warnings.log")
                with open(log_filename, 'wb') as logfile:
                    logfile.write(str(self._user_warnings).encode('utf-8'))

                self.logger.info("user_warnings.log written")
            except Exception as error:
                msg = "Exception ({}) while writing user warnings: {}".format(
                    type(error), error)
                raise ContainerError(msg)

        for logfile in platform_logs.values():
            logfile.close()
            self.logger.info("%s written", logfile.name)

    def _write_incremental_logs(self, build_id, logs_dir):
        if self.demux and hasattr(self.osbs(), 'get_orchestrator_build_logs'):
            self._write_demultiplexed_logs(build_id, logs_dir)
        else:
            self._write_combined_log(build_id, logs_dir)

        build_response = self.osbs().get_build(build_id)
        if (build_response.is_running() or build_response.is_pending()):
            raise ContainerError("Build log finished but build still has not "
                                 "finished: %s." % build_response.status)

    def _get_repositories(self, response):
        repositories = []
        repo_dict = None
        try:
            repo_dict = response.get_repositories()
            if repo_dict:
                for repos in repo_dict.values():
                    repositories.extend(repos)
        except Exception as error:
            self.logger.error(
                "Failed to get available repositories from: %r. "
                "Reason(%s): %s", repo_dict, type(error), error)
        return repositories

    def _get_koji_build_id(self, response):
        koji_build_id = None
        if hasattr(response, "get_koji_build_id"):
            koji_build_id = response.get_koji_build_id()
        else:
            self.logger.info("Koji content generator build ID not available.")

        return koji_build_id

    def _get_error_message(self, response):
        error_message = None
        if hasattr(response, "get_error_message"):
            error_message = response.get_error_message()
        else:
            self.logger.info("Error message is not available")

        return error_message

    def check_whitelist(self, name, target_info):
        """Check if container name is whitelisted in destination tag

        Raises koji.BuildError if the package is not listed for the tag or is blocked.
        """
        pkg_cfg = self.session.getPackageConfig(target_info['dest_tag_name'],
                                                name)
        self.logger.debug("%r", pkg_cfg)
        # Make sure package is on the list for this tag
        if pkg_cfg is None:
            raise koji.BuildError(
                "package (container) %s not in list for tag %s" %
                (name, target_info['dest_tag_name']))
        elif pkg_cfg['blocked']:
            raise koji.BuildError(
                "package (container)  %s is blocked for tag %s" %
                (name, target_info['dest_tag_name']))
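
    # For reference, getPackageConfig() returns None when the package is not
    # listed for the destination tag; for a listed package it returns a dict
    # along these lines (illustrative fields; only 'blocked' is consulted above):
    #
    #   {'package_name': 'example-container', 'tag_name': 'example-dest-tag',
    #    'blocked': False}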

    def upload_build_annotations(self, build_response):
        annotations = build_response.get_annotations() or {}
        whitelist_str = annotations.get('koji_task_annotations_whitelist',
                                        "[]")
        whitelist = json.loads(whitelist_str)
        task_annotations = {
            k: v
            for k, v in annotations.items() if k in whitelist
        }
        if task_annotations:
            f = StringIO()
            json.dump(task_annotations, f, sort_keys=True, indent=4)
            f.seek(0)
            incremental_upload(self.session,
                               ANNOTATIONS_FILENAME,
                               f,
                               self.getUploadPath(),
                               logger=self.logger)
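
    # For context, build annotations might look roughly like the dict below
    # (hypothetical keys and values); only the keys named in
    # 'koji_task_annotations_whitelist' are serialized and uploaded as
    # ANNOTATIONS_FILENAME:
    #
    #   {
    #       'koji_task_annotations_whitelist': '["remote_sources"]',
    #       'remote_sources': '[{"name": "example-source"}]',
    #       'internal_only_annotation': 'not uploaded',
    #   }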

    def handle_build_response(self, build_response, arch=None):
        build_id = build_response.get_build_name()
        self.logger.debug("OSBS build id: %r", build_id)

        # When a build is cancelled, the builder plugin process receives SIGINT
        # and then SIGKILL; if OSBS has already started a build, cancel it too.
        def sigint_handler(*args, **kwargs):
            if not build_id:
                return

            self.logger.warning("Cannot read logs, cancelling build %s",
                                build_id)
            self.osbs().cancel_build(build_id)

        signal.signal(signal.SIGINT, sigint_handler)

        self.logger.debug("Waiting for osbs build_id: %s to be scheduled.",
                          build_id)
        # we need to wait for kubelet to schedule the build, otherwise it's 500
        self.osbs().wait_for_build_to_get_scheduled(build_id)
        self.logger.debug("Build was scheduled")

        osbs_logs_dir = self.resultdir()
        koji.ensuredir(osbs_logs_dir)
        pid = os.fork()
        if pid:
            try:
                self._incremental_upload_logs(pid)
            except koji.ActionNotAllowed:
                pass
        else:
            self._osbs = None

            try:
                self._write_incremental_logs(build_id, osbs_logs_dir)
            except Exception as error:
                self.logger.info("Error while saving incremental logs: %s",
                                 error)
                os._exit(1)
            os._exit(0)

        response = self.osbs().wait_for_build_to_finish(build_id)
        if response.is_succeeded():
            self.upload_build_annotations(response)

        self.logger.debug(
            "OSBS build finished with status: %s. Build "
            "response: %s.", response.status, response.json)

        self.logger.info("Response status: %r", response.is_succeeded())

        if response.is_cancelled():
            self.session.cancelTask(self.id)
            raise ContainerCancelled('Image build was cancelled by OSBS.')

        elif response.is_failed():
            error_message = self._get_error_message(response)
            if error_message:
                raise ContainerError(
                    'Image build failed. %s. OSBS build id: %s' %
                    (error_message, build_id))
            else:
                raise ContainerError('Image build failed. OSBS build id: %s' %
                                     build_id)

        repositories = []
        if response.is_succeeded():
            repositories = self._get_repositories(response)

        self.logger.info("Image available in the following repositories: %r",
                         repositories)

        koji_build_id = None
        if response.is_succeeded():
            koji_build_id = self._get_koji_build_id(response)

        self.logger.info("Koji content generator build ID: %s", koji_build_id)

        containerdata = {
            'task_id': self.id,
            'osbs_build_id': build_id,
            'files': [],
            'repositories': repositories,
            'koji_build_id': koji_build_id,
        }
        if arch:
            containerdata['arch'] = arch

        if self._user_warnings:
            containerdata['user_warnings'] = list(self._user_warnings)

        return containerdata
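
For orientation, the containerdata returned for a successful build might look roughly like this (illustrative values only; the keys mirror the dict built above):

# Illustrative handle_build_response() return value (hypothetical values)
containerdata = {
    'task_id': 12345,
    'osbs_build_id': 'example-build-1',
    'files': [],
    'repositories': ['registry.example.com/namespace/image:tag'],
    'koji_build_id': 67890,
    'arch': 'x86_64',                           # only when arch was passed
    'user_warnings': ['example user warning'],  # only when warnings were stored
}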