Example #1
    def repository_check(self, repository_pairs, archs):
        project, repository = repository_pairs[0]
        self.logger.info('checking {}/{}'.format(project, repository))

        if not archs:
            self.logger.debug(
                '{} has no relevant architectures'.format(project))
            return

        for arch in archs:
            directories = []
            for pair_project, pair_repository in repository_pairs:
                directories.append(
                    mirror(self.apiurl, pair_project, pair_repository, arch))

            parts = installcheck(directories, arch, [], [])
            if parts:
                self.comment.append('## {}/{}\n'.format(
                    repository_pairs[0][1], arch))
                self.comment.extend(parts)

        return len(self.comment) == 0
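
A minimal standalone sketch of the comment assembly above, with the per-architecture installcheck results stubbed in as made-up placeholder strings (the real method obtains them via mirror() and installcheck()):

    # Build the same '## repository/arch' sections the bot posts as a comment;
    # the overall check passes only when no architecture reported problems.
    comment = []
    results = {'x86_64': ['can not install foo:\n  nothing provides bar\n'],
               'aarch64': []}
    for arch, parts in sorted(results.items()):
        if parts:
            comment.append('## {}/{}\n'.format('standard', arch))
            comment.extend(parts)
    print(len(comment) == 0)  # False: x86_64 reported an uninstallable package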
Example #2
    def check_pra(self, project, repository, arch):
        config = Config.get(self.apiurl, project)

        oldstate = None
        self.store_filename = 'rebuildpacs.{}-{}.yaml'.format(project, repository)
        if self.store_project and self.store_package:
            state_yaml = source_file_load(self.apiurl, self.store_project, self.store_package,
                                          self.store_filename)
            if state_yaml:
                oldstate = yaml.safe_load(state_yaml)

        # normalize the previously stored state into the expected structure
        oldstate = oldstate or {}
        oldstate.setdefault('check', {})
        if not isinstance(oldstate['check'], dict):
            oldstate['check'] = {}
        oldstate.setdefault('leafs', {})
        if not isinstance(oldstate['leafs'], dict):
            oldstate['leafs'] = {}

        repository_pairs = repository_path_expand(self.apiurl, project, repository)
        directories = []
        primaryxmls = []
        for pair_project, pair_repository in repository_pairs:
            mirrored = mirror(self.apiurl, pair_project, pair_repository, arch)
            if os.path.isdir(mirrored):
                directories.append(mirrored)
            else:
                primaryxmls.append(mirrored)

        parsed = dict()
        with tempfile.TemporaryDirectory(prefix='repochecker') as tmpdir:
            pfile = os.path.join(tmpdir, 'packages')

            SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
            script = os.path.join(SCRIPT_PATH, 'write_repo_susetags_file.pl')
            parts = ['perl', script, tmpdir] + directories

            p = subprocess.run(parts)
            if p.returncode:
                # the script currently only exits with 126 on failure, but treat
                # any non-zero exit as fatal so unexpected perl errors stop us too
                raise CorruptRepos

            target_packages = []
            with open(os.path.join(tmpdir, 'catalog.yml')) as file:
                catalog = yaml.safe_load(file)
                if catalog is not None:
                    target_packages = catalog.get(directories[0], [])

            parsed = parsed_installcheck([pfile] + primaryxmls, arch, target_packages, [])
            for package in parsed:
                parsed[package]['output'] = "\n".join(parsed[package]['output'])

            # let's risk an N*N algorithm in the hope that N stays small
            # (a standalone sketch of this folding follows the method)
            for package1 in parsed:
                output = parsed[package1]['output']
                for package2 in parsed:
                    if package1 == package2:
                        continue
                    output = output.replace(parsed[package2]['output'], 'FOLLOWUP(' + package2 + ')')
                parsed[package1]['output'] = output

            for package in parsed:
                parsed[package]['output'] = self._split_and_filter(parsed[package]['output'])

        url = makeurl(self.apiurl, ['build', project, '_result'], {
                      'repository': repository, 'arch': arch, 'code': 'succeeded'})
        root = ET.parse(http_GET(url)).getroot()
        succeeding = [status.get('package') for status in root.findall('.//status')]

        # group the per-binary installcheck output by source package, tracking
        # whether each source currently builds successfully
        per_source = dict()

        for package, entry in parsed.items():
            source = "{}/{}/{}/{}".format(project, repository, arch, entry['source'])
            per_source.setdefault(source, {'output': [], 'builds': entry['source'] in succeeding})
            per_source[source]['output'].extend(entry['output'])

        rebuilds = set()

        for source in sorted(per_source):
            if not per_source[source]['output']:
                continue
            self.logger.debug("{} builds: {}".format(source, per_source[source]['builds']))
            self.logger.debug("  " + "\n  ".join(per_source[source]['output']))
            if not per_source[source]['builds']:  # nothing we can do
                continue
            old_output = oldstate['check'].get(source, {}).get('problem', [])
            if sorted(old_output) == sorted(per_source[source]['output']):
                self.logger.debug("unchanged problem")
                continue
            self.logger.info("rebuild %s", source)
            rebuilds.add(os.path.basename(source))
            for line in difflib.unified_diff(old_output, per_source[source]['output'], 'before', 'now'):
                self.logger.debug(line.strip())
            oldstate['check'][source] = {'problem': per_source[source]['output'],
                                         'rebuild': str(datetime.datetime.now())}

        # forget stored problems for sources that build again and no longer
        # show up in the current problem set
        for source in list(oldstate['check']):
            if not source.startswith('{}/{}/{}/'.format(project, repository, arch)):
                continue
            if os.path.basename(source) not in succeeding:
                continue
            if source not in per_source:
                self.logger.info("No known problem, erasing %s", source)
                del oldstate['check'][source]

        packages = config.get('rebuildpacs-leafs', '').split()
        if not self.rebuild:  # no rebuilds requested, so skip leaf handling
            packages = []

        # first round: collect all info from OBS
        infos = dict()
        for package in packages:
            subpacks, build_deps = self.check_leaf_package(project, repository, arch, package)
            infos[package] = {'subpacks': subpacks, 'deps': build_deps}

        # calculate rebuild triggers
        rebuild_triggers = dict()
        for package1 in packages:
            for package2 in packages:
                if package1 == package2:
                    continue
                for subpack in infos[package1]['subpacks']:
                    if subpack in infos[package2]['deps']:
                        rebuild_triggers.setdefault(package1, set())
                        rebuild_triggers[package1].add(package2)
                        # ignore this dependency; we already trigger both of them
                        del infos[package2]['deps'][subpack]

        # calculate build info hashes
        for package in packages:
            if package not in succeeding:
                self.logger.debug("Ignore %s for the moment, not succeeding", package)
                continue
            m = hashlib.sha256()
            for bdep in sorted(infos[package]['deps']):
                m.update(bytes(bdep + '-' + infos[package]['deps'][bdep], 'utf-8'))
            state_key = '{}/{}/{}/{}'.format(project, repository, arch, package)
            olddigest = oldstate['leafs'].get(state_key, {}).get('buildinfo')
            if olddigest == m.hexdigest():
                continue
            self.logger.info("rebuild leaf package %s (%s vs %s)", package, olddigest, m.hexdigest())
            rebuilds.add(package)
            oldstate['leafs'][state_key] = {'buildinfo': m.hexdigest(),
                                            'rebuild': str(datetime.datetime.now())}

        if self.dryrun:
            if self.rebuild:
                self.logger.info("To rebuild: %s", ' '.join(rebuilds))
            return

        if not self.rebuild or not rebuilds:
            self.logger.debug("Nothing to rebuild")
            # in case we do rebuild, wait for it to succeed before saving
            self.store_yaml(oldstate, project, repository, arch)
            return

        query = {'cmd': 'rebuild', 'repository': repository, 'arch': arch, 'package': rebuilds}
        url = makeurl(self.apiurl, ['build', project])
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        http_request('POST', url, headers, data=urlencode(query, doseq=True))

        self.store_yaml(oldstate, project, repository, arch)
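
The N*N folding loop above collapses duplicated installcheck output between dependent packages. A minimal standalone sketch of that technique, using invented package names and output strings:

    # If one package's installcheck output is fully contained in another's,
    # replace the repeated text with a FOLLOWUP(<package>) marker so the
    # report spells out the root problem only once.
    parsed = {
        'libfoo': {'output': 'nothing provides libbar'},
        'foo': {'output': 'nothing provides libbar\nnothing provides libfoo'},
    }
    for package1 in parsed:
        output = parsed[package1]['output']
        for package2 in parsed:
            if package1 == package2:
                continue
            output = output.replace(parsed[package2]['output'],
                                    'FOLLOWUP(' + package2 + ')')
        parsed[package1]['output'] = output
    print(parsed['foo']['output'])  # FOLLOWUP(libfoo)\nnothing provides libfoo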

    def staging(self, project, force=False):
        api = self.api

        repository = self.api.cmain_repo

        # fetch the build ids at the beginning - mirroring takes a while
        buildids = {}
        try:
            architectures = self.target_archs(project, repository)
        except HTTPError as e:
            if e.code == 404:
                # adi projects disappear all the time, so don't worry
                return False
            raise

        all_done = True
        for arch in architectures:
            pra = '{}/{}/{}'.format(project, repository, arch)
            buildid = self.buildid(project, repository, arch)
            if not buildid:
                self.logger.error('No build ID in {}'.format(pra))
                return False
            buildids[arch] = buildid
            url = self.report_url(project, repository, arch, buildid)
            try:
                root = ET.parse(osc.core.http_GET(url)).getroot()
                check = root.find('check[@name="installcheck"]/state')
                if check is not None and check.text != 'pending':
                    self.logger.info('{} already "{}", ignoring'.format(
                        pra, check.text))
                else:
                    all_done = False
            except HTTPError:
                self.logger.info('{} has no status report'.format(pra))
                all_done = False

        if all_done and not force:
            return True

        repository_pairs = repository_path_expand(api.apiurl, project,
                                                  repository)
        result_comment = []

        result = True
        to_ignore = self.packages_to_ignore(project)
        status = api.project_status(project)
        if status is None:
            self.logger.error('no project status for {}'.format(project))
            return False

        for req in status.findall('staged_requests/request'):
            if req.get('type') == 'delete':
                result = result and self.check_delete_request(
                    req, to_ignore, result_comment)

        for arch in architectures:
            # hit the first repository in the target project (if existent)
            target_pair = None
            directories = []
            for pair_project, pair_repository in repository_pairs:
                # ignore repositories only inherited for config
                if repository_arch_state(self.api.apiurl, pair_project,
                                         pair_repository, arch):
                    if not target_pair and pair_project == api.project:
                        target_pair = [pair_project, pair_repository]

                    directories.append(
                        mirror(self.api.apiurl, pair_project, pair_repository,
                               arch))

            if not api.is_adi_project(project):
                # For "leaky" ring packages in letter stagings, where the
                # repository setup does not include the target project, that are
                # not intended to to have all run-time dependencies satisfied.
                whitelist = self.ring_whitelist
            else:
                whitelist = set()

            whitelist |= to_ignore
            ignore_conflicts = self.ignore_conflicts | to_ignore

            check = self.cycle_check(project, repository, arch)
            if not check.success:
                self.logger.warning('Cycle check failed')
                result_comment.append(check.comment)
                result = False

            check = self.install_check(directories, arch, whitelist,
                                       ignore_conflicts)
            if not check.success:
                self.logger.warning('Install check failed')
                result_comment.append(check.comment)
                result = False

        duplicates = duplicated_binaries_in_repo(self.api.apiurl, project,
                                                 repository)
        # remove whitelisted duplicates
        for arch in list(duplicates):
            for binary in self.ignore_duplicated:
                duplicates[arch].pop(binary, None)
            if not duplicates[arch]:
                del duplicates[arch]
        if duplicates:
            self.logger.warning('Found duplicated binaries')
            result_comment.append(
                yaml.dump(duplicates, default_flow_style=False))
            result = False

        if result:
            self.report_state('success', self.gocd_url(), project, repository,
                              buildids)
        else:
            result_comment.insert(
                0, 'Generated from {}\n'.format(self.gocd_url()))
            self.report_state('failure',
                              self.upload_failure(project, result_comment),
                              project, repository, buildids)
            self.logger.warning('Not accepting {}'.format(project))
            return False

        return result
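
A standalone sketch of the duplicate filtering in staging() above, with an invented duplicates mapping standing in for the result of duplicated_binaries_in_repo():

    # Drop whitelisted binary names per architecture, then discard any
    # architecture whose duplicate list became empty; whatever remains
    # fails the check.
    duplicates = {'x86_64': {'libfoo1': ['pkgA', 'pkgB'],
                             'known-noise': ['pkgC', 'pkgD']}}
    ignore_duplicated = {'known-noise'}
    for arch in list(duplicates):
        for binary in ignore_duplicated:
            duplicates[arch].pop(binary, None)
        if not duplicates[arch]:
            del duplicates[arch]
    print(duplicates)  # {'x86_64': {'libfoo1': ['pkgA', 'pkgB']}}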