# --- Example 1 ---
    def request_repository_pairs(self, request, action):
        """Resolve the [project, repository] pair chain for a request action.

        Returns True when review should be skipped, False when declined,
        None when the request is not ready (not staged or staging still
        failing), otherwise the list of pairs to install-check.
        """
        if str2bool(Config.get(self.apiurl, action.tgt_project).get(
                'repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug(
                'skipping review of action targeting {}'.format(action.tgt_project))
            return True

        repository = self.project_repository(action.tgt_project)
        if not repository:
            self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
            return False

        pairs = []
        # Assumes maintenance_release target project has staging disabled.
        staging = Config.get(self.apiurl, action.tgt_project).get('staging')
        if staging:
            api = self.staging_api(staging)
            staged = api.packages_staged.get(action.tgt_package)
            # Compare as strings; the staged rq_id may not be stored as an int.
            if not staged or str(staged['rq_id']) != str(request.reqid):
                self.logger.info('{} not staged'.format(request.reqid))
                return None

            if not self.force and not self.staging_build_failure_check(api, staged['prj']):
                self.logger.info(
                    '{} not ready due to staging build failure(s)'.format(request.reqid))
                return None

            # Staging setup is convoluted and thus the repository setup does not
            # contain a path to the target project. Instead the ports repository
            # is used to import the target prjconf. As such the staging group
            # repository must be explicitly layered on top of target project.
            pairs.append([staged['prj'], repository])
            pairs.extend(repository_path_expand(self.apiurl, action.tgt_project, repository))
        else:
            # Find a repository which links to target project "main" repository.
            repository = repository_path_search(
                self.apiurl, action.src_project, action.tgt_project, repository)
            if not repository:
                self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
                return False

            pairs.extend(repository_path_expand(self.apiurl, action.src_project, repository))

        return pairs
    def project_only(self, project, post_comments=False):
        """Run the repository install check against a whole project."""
        repository = self.project_repository(project)
        if not repository:
            # No usable repository could be determined; nothing to check.
            self.logger.error(ERROR_REPO_SPECIFIED.format(project))
            return

        pairs = repository_path_expand(self.apiurl, project, repository)
        state = self.repository_state(pairs, False)
        self.repository_check(pairs, state, False, bool(post_comments))
 def test_factory_staging(self):
     """Staging expands via bootstrap_copy down to the Factory ports repo."""
     for name in ('openSUSE:Factory',
                  'openSUSE:Factory:Staging',
                  'openSUSE:Factory:Staging:H'):
         self.add_project(name)

     expected = [['openSUSE:Factory:Staging:H', 'standard'],
                 ['openSUSE:Factory:Staging:H', 'bootstrap_copy'],
                 ['openSUSE:Factory:Staging', 'standard'],
                 ['openSUSE:Factory', 'ports']]
     self.assertEqual(expected, repository_path_expand(
         self.wf.api.apiurl, 'openSUSE:Factory:Staging:H', 'standard'))
    def request_repository_pairs(self, request, action):
        """Determine the [project, repository] pairs to install-check.

        Returns True to skip review entirely, False when the review is
        declined, None when the request is staged but not ready, otherwise
        the expanded list of [project, repository] pairs.
        """
        if str2bool(Config.get(self.apiurl, action.tgt_project).get('repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug('skipping review of action targeting {}'.format(action.tgt_project))
            return True

        repository = self.project_repository(action.tgt_project)
        if not repository:
            self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
            return False

        repository_pairs = []
        # Assumes maintenance_release target project has staging disabled.
        staging = Config.get(self.apiurl, action.tgt_project).get('staging')
        if staging:
            api = self.staging_api(staging)
            stage_info = api.packages_staged.get(action.tgt_package)
            # Compare as strings; rq_id may not be stored as an int.
            if not stage_info or str(stage_info['rq_id']) != str(request.reqid):
                self.logger.info('{} not staged'.format(request.reqid))
                return None

            # Unless forced, wait for the staging project to build cleanly.
            if not self.force and not self.staging_build_failure_check(api, stage_info['prj']):
                self.logger.info('{} not ready due to staging build failure(s)'.format(request.reqid))
                return None

            # Staging setup is convoluted and thus the repository setup does not
            # contain a path to the target project. Instead the ports repository
            # is used to import the target prjconf. As such the staging group
            # repository must be explicitly layered on top of target project.
            repository_pairs.append([stage_info['prj'], repository])
            repository_pairs.extend(repository_path_expand(self.apiurl, action.tgt_project, repository))
        else:
            # Find a repository which links to target project "main" repository.
            repository = repository_path_search(
                self.apiurl, action.src_project, action.tgt_project, repository)
            if not repository:
                self.review_messages['declined'] = ERROR_REPO_SPECIFIED.format(action.tgt_project)
                return False

            repository_pairs.extend(repository_path_expand(self.apiurl, action.src_project, repository))

        return repository_pairs
# --- Example 5 ---
    def test_sp5_setup(self):
        """SP5 expands through alternating Update snapshot / GA standard pairs."""
        for sp in ('', '-SP1', '-SP2', '-SP3', '-SP4'):
            self.add_project('SUSE:SLE-12{}:GA'.format(sp))
            self.add_project('SUSE:SLE-12{}:Update'.format(sp))
        self.add_project('SUSE:SLE-12-SP5:GA')
        self.add_project('SUSE:SLE-12-SP5:GA:Staging:A')

        # Expected chain: SP5 GA first, then for each prior service pack the
        # Update snapshot followed by the GA standard repository.
        chain = [['SUSE:SLE-12-SP5:GA', 'standard']]
        for sp in ('-SP4', '-SP3', '-SP2', '-SP1', ''):
            chain.append(['SUSE:SLE-12{}:Update'.format(sp), 'snapshot-SP5'])
            chain.append(['SUSE:SLE-12{}:GA'.format(sp), 'standard'])
        self.assertEqual(chain, repository_path_expand(
            self.wf.api.apiurl, 'SUSE:SLE-12-SP5:GA', 'standard'))

        # The staging project layers itself plus the GA ports repository on
        # top of the same underlying chain.
        staging_chain = [['SUSE:SLE-12-SP5:GA:Staging:A', 'standard'],
                         ['SUSE:SLE-12-SP5:GA', 'ports']] + chain[1:]
        self.assertEqual(staging_chain, repository_path_expand(
            self.wf.api.apiurl, 'SUSE:SLE-12-SP5:GA:Staging:A', 'standard'))
# --- Example 6 ---
    def project_only(self, project, post_comments=False):
        """Install-check an entire project outside of any request review."""
        repository = self.project_repository(project)
        if not repository:
            # Without a repository there is nothing meaningful to check.
            self.logger.error(ERROR_REPO_SPECIFIED.format(project))
            return

        pairs = repository_path_expand(self.apiurl, project, repository)
        self.repository_check(pairs, self.repository_state(pairs), False,
                              bool(post_comments))
# --- Example 7 ---
 def test_factory_staging(self):
     """A letter staging expands to bootstrap_copy and then Factory ports."""
     self.add_project('openSUSE:Factory')
     self.add_project('openSUSE:Factory:Staging')
     self.add_project('openSUSE:Factory:Staging:H')

     repos = repository_path_expand(self.wf.api.apiurl, 'openSUSE:Factory:Staging:H', 'standard')
     expected = [
         ['openSUSE:Factory:Staging:H', 'standard'],
         ['openSUSE:Factory:Staging:H', 'bootstrap_copy'],
         ['openSUSE:Factory:Staging', 'standard'],
         ['openSUSE:Factory', 'ports'],
     ]
     self.assertEqual(expected, repos)
    def project_only(self, project, post_comments=False):
        """Install-check a whole project, honoring its configured arch whitelist."""
        repository = self.project_repository(project)
        if not repository:
            self.logger.error(ERROR_REPO_SPECIFIED.format(project))
            return

        # Optional space-separated arch whitelist from OSRT:Config.
        whitelist = Config.get(self.apiurl, project).get('repo_checker-arch-whitelist')

        pairs = repository_path_expand(self.apiurl, project, repository)
        state = self.repository_state(pairs, False)
        self.repository_check(pairs, state, False, bool(post_comments),
                              arch_whitelist=whitelist)
    def project_only(self, project, post_comments=False):
        """Install-check an entire project (not tied to any request).

        Looks up the project's main repository, expands the repository
        path chain and runs the repository check, restricted to the
        configured architecture whitelist when one is set.
        """
        repository = self.project_repository(project)
        if not repository:
            # No usable repository; log and bail out.
            self.logger.error(ERROR_REPO_SPECIFIED.format(project))
            return

        config = Config.get(self.apiurl, project)
        # Optional space-separated list restricting which archs are checked.
        arch_whitelist = config.get('repo_checker-arch-whitelist')

        repository_pairs = repository_path_expand(self.apiurl, project, repository)
        state_hash = self.repository_state(repository_pairs, False)
        self.repository_check(repository_pairs, state_hash, False, bool(post_comments), arch_whitelist=arch_whitelist)
    def test_sp5_setup(self):
        """Repository expansion for the SLE-12 SP5 chain.

        Verifies that SP5:GA expands through each prior service pack's
        Update snapshot and GA standard repositories, and that a staging
        project additionally layers the GA ports repository on top.
        """
        self.add_project('SUSE:SLE-12:GA')
        self.add_project('SUSE:SLE-12:Update')
        self.add_project('SUSE:SLE-12-SP1:GA')
        self.add_project('SUSE:SLE-12-SP1:Update')
        self.add_project('SUSE:SLE-12-SP2:GA')
        self.add_project('SUSE:SLE-12-SP2:Update')
        self.add_project('SUSE:SLE-12-SP3:GA')
        self.add_project('SUSE:SLE-12-SP3:Update')
        self.add_project('SUSE:SLE-12-SP4:GA')
        self.add_project('SUSE:SLE-12-SP4:Update')
        self.add_project('SUSE:SLE-12-SP5:GA')
        self.add_project('SUSE:SLE-12-SP5:GA:Staging:A')

        repos = repository_path_expand(self.wf.api.apiurl, 'SUSE:SLE-12-SP5:GA', 'standard')
        self.assertEqual([['SUSE:SLE-12-SP5:GA', 'standard'],
                          ['SUSE:SLE-12-SP4:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP4:GA', 'standard'],
                          ['SUSE:SLE-12-SP3:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP3:GA', 'standard'],
                          ['SUSE:SLE-12-SP2:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP2:GA', 'standard'],
                          ['SUSE:SLE-12-SP1:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP1:GA', 'standard'],
                          ['SUSE:SLE-12:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12:GA', 'standard']], repos)

        # Staging expansion: same chain prefixed by the staging project's
        # own repository and the GA ports repository.
        repos = repository_path_expand(self.wf.api.apiurl, 'SUSE:SLE-12-SP5:GA:Staging:A', 'standard')
        self.assertEqual([['SUSE:SLE-12-SP5:GA:Staging:A', 'standard'],
                          ['SUSE:SLE-12-SP5:GA', 'ports'],
                          ['SUSE:SLE-12-SP4:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP4:GA', 'standard'],
                          ['SUSE:SLE-12-SP3:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP3:GA', 'standard'],
                          ['SUSE:SLE-12-SP2:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP2:GA', 'standard'],
                          ['SUSE:SLE-12-SP1:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12-SP1:GA', 'standard'],
                          ['SUSE:SLE-12:Update', 'snapshot-SP5'],
                          ['SUSE:SLE-12:GA', 'standard']], repos)
# --- Example 11 ---
    def check_action_maintenance_release(self, request, action):
        """Review a maintenance release action via repository install check.

        Returns True when the target passes (or review is skipped), None
        when the check did not pass. Raises when the required repository
        configuration is missing.
        """
        # No reason to special case patchinfo since same source and target
        # projects which is all that repo_checker cares about.

        if action.tgt_project in self.checked_targets:
            # Each target project only needs to be validated once.
            return True

        config = Config.get(self.apiurl, action.tgt_project)
        if str2bool(config.get('repo_checker-project-skip', 'False')):
            # Do not change message as this should only occur in requests
            # targeting multiple projects such as in maintenance workflow in
            # which the message should be set by other actions.
            self.logger.debug(
                'skipping review of action targeting {}'.format(action.tgt_project))
            return True

        main_repo = config.get('main-repo')
        if not main_repo:
            raise Exception('Missing main-repo in OSRT:Config')

        # Find a repository which links to target project "main" repository.
        repository = repository_path_search(
            self.apiurl, action.src_project, action.tgt_project, main_repo)
        if not repository:
            raise Exception('Missing repositories')

        pairs = repository_path_expand(self.apiurl, action.src_project, repository)

        self.checked_targets.add(action.tgt_project)
        archs = set(target_archs(self.apiurl, action.src_project, repository))
        whitelist = config.get('repo_checker-arch-whitelist', None)
        if whitelist:
            # Restrict the check to archs that are both built and whitelisted.
            archs = set(whitelist.split(' ')).intersection(archs)

        if not self.repository_check(pairs, archs):
            return None

        self.review_messages['accepted'] = 'install check passed'
        return True
# --- Example 12 ---
 def expand_repos(self, project, repo='standard'):
     """Expand the repository path chain for *project*/*repo*."""
     pairs = repository_path_expand(self.apiurl, project, repo)
     return pairs
# --- Example 13 ---
    def check_pra(self, project, repository, arch):
        """Install-check one project/repository/arch and trigger rebuilds.

        Mirrors the expanded repository chain, runs installcheck over it,
        compares the problems against the previously stored state and
        rebuilds packages whose problem set changed. Leaf packages listed
        in rebuildpacs-leafs are additionally rebuilt when their build-info
        hash changes. State is persisted via store_yaml unless dryrun.
        """
        config = Config.get(self.apiurl, project)

        # Load previously persisted state, if a store location is configured.
        oldstate = None
        self.store_filename = 'rebuildpacs.{}-{}.yaml'.format(project, repository)
        if self.store_project and self.store_package:
            state_yaml = source_file_load(self.apiurl, self.store_project, self.store_package,
                                        self.store_filename)
            if state_yaml:
                oldstate = yaml.safe_load(state_yaml)

        # Normalize the state structure so later lookups are safe even when
        # the stored YAML is missing or malformed.
        oldstate = oldstate or {}
        oldstate.setdefault('check', {})
        if not isinstance(oldstate['check'], dict):
            oldstate['check'] = {}
        oldstate.setdefault('leafs', {})
        if not isinstance(oldstate['leafs'], dict):
            oldstate['leafs'] = {}

        repository_pairs = repository_path_expand(self.apiurl, project, repository)
        directories = []
        primaryxmls = []
        for pair_project, pair_repository in repository_pairs:
            # mirror() yields either a local directory or (presumably) a
            # primary xml path — branch on which. TODO confirm with mirror().
            mirrored = mirror(self.apiurl, pair_project, pair_repository, arch)
            if os.path.isdir(mirrored):
                directories.append(mirrored)
            else:
                primaryxmls.append(mirrored)

        parsed = dict()
        with tempfile.TemporaryDirectory(prefix='repochecker') as dir:
            pfile = os.path.join(dir, 'packages')

            SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
            script = os.path.join(SCRIPT_PATH, 'write_repo_susetags_file.pl')
            parts = ['perl', script, dir] + directories

            p = subprocess.run(parts)
            if p.returncode:
                # technically only 126, but there is no other value atm -
                # so if some other perl error happens, we don't continue
                raise CorruptRepos

            target_packages = []
            with open(os.path.join(dir, 'catalog.yml')) as file:
                catalog = yaml.safe_load(file)
                if catalog is not None:
                    target_packages = catalog.get(directories[0], [])

            parsed = parsed_installcheck([pfile] + primaryxmls, arch, target_packages, [])
            for package in parsed:
                parsed[package]['output'] = "\n".join(parsed[package]['output'])

            # Collapse cascading problems: if package1's output embeds
            # package2's whole output, replace it with a FOLLOWUP marker.
            # let's risk a N*N algorithm in the hope that we have a limited N
            for package1 in parsed:
                output = parsed[package1]['output']
                for package2 in parsed:
                    if package1 == package2:
                        continue
                    output = output.replace(parsed[package2]['output'], 'FOLLOWUP(' + package2 + ')')
                parsed[package1]['output'] = output

            for package in parsed:
                parsed[package]['output'] = self._split_and_filter(parsed[package]['output'])

        # Packages that currently build successfully in this repo/arch.
        url = makeurl(self.apiurl, ['build', project, '_result'], {
                      'repository': repository, 'arch': arch, 'code': 'succeeded'})
        root = ET.parse(http_GET(url)).getroot()
        succeeding = list(map(lambda x: x.get('package'), root.findall('.//status')))

        per_source = dict()

        # Group installcheck output by source package.
        for package, entry in parsed.items():
            source = "{}/{}/{}/{}".format(project, repository, arch, entry['source'])
            per_source.setdefault(source, {'output': [], 'builds': entry['source'] in succeeding})
            per_source[source]['output'].extend(entry['output'])

        rebuilds = set()

        for source in sorted(per_source):
            if not len(per_source[source]['output']):
                continue
            self.logger.debug("{} builds: {}".format(source, per_source[source]['builds']))
            self.logger.debug("  " + "\n  ".join(per_source[source]['output']))
            if not per_source[source]['builds']:  # nothing we can do
                continue
            old_output = oldstate['check'].get(source, {}).get('problem', [])
            # Only rebuild when the (order-insensitive) problem set changed.
            if sorted(old_output) == sorted(per_source[source]['output']):
                self.logger.debug("unchanged problem")
                continue
            self.logger.info("rebuild %s", source)
            rebuilds.add(os.path.basename(source))
            for line in difflib.unified_diff(old_output, per_source[source]['output'], 'before', 'now'):
                self.logger.debug(line.strip())
            oldstate['check'][source] = {'problem': per_source[source]['output'],
                                         'rebuild': str(datetime.datetime.now())}

        # Erase stored problems for sources that now build and have no
        # currently recorded problem.
        for source in list(oldstate['check']):
            if not source.startswith('{}/{}/{}/'.format(project, repository, arch)):
                continue
            if os.path.basename(source) not in succeeding:
                continue
            if source not in per_source:
                self.logger.info("No known problem, erasing %s", source)
                del oldstate['check'][source]

        packages = config.get('rebuildpacs-leafs', '').split()
        if not self.rebuild:  # ignore in this case
            packages = []

        # first round: collect all infos from obs
        infos = dict()
        for package in packages:
            subpacks, build_deps = self.check_leaf_package(project, repository, arch, package)
            infos[package] = {'subpacks': subpacks, 'deps': build_deps}

        # calculate rebuild triggers
        rebuild_triggers = dict()
        for package1 in packages:
            for package2 in packages:
                if package1 == package2:
                    continue
                for subpack in infos[package1]['subpacks']:
                    if subpack in infos[package2]['deps']:
                        rebuild_triggers.setdefault(package1, set())
                        rebuild_triggers[package1].add(package2)
                        # ignore this dependency. we already trigger both of them
                        del infos[package2]['deps'][subpack]

        # calculate build info hashes
        for package in packages:
            if package not in succeeding:
                self.logger.debug("Ignore %s for the moment, not succeeding", package)
                continue
            m = hashlib.sha256()
            for bdep in sorted(infos[package]['deps']):
                m.update(bytes(bdep + '-' + infos[package]['deps'][bdep], 'utf-8'))
            state_key = '{}/{}/{}/{}'.format(project, repository, arch, package)
            olddigest = oldstate['leafs'].get(state_key, {}).get('buildinfo')
            if olddigest == m.hexdigest():
                continue
            self.logger.info("rebuild leaf package %s (%s vs %s)", package, olddigest, m.hexdigest())
            rebuilds.add(package)
            oldstate['leafs'][state_key] = {'buildinfo': m.hexdigest(),
                                            'rebuild': str(datetime.datetime.now())}

        if self.dryrun:
            if self.rebuild:
                self.logger.info("To rebuild: %s", ' '.join(rebuilds))
            return

        if not self.rebuild or not len(rebuilds):
            self.logger.debug("Nothing to rebuild")
            # in case we do rebuild, wait for it to succeed before saving
            self.store_yaml(oldstate, project, repository, arch)
            return

        query = {'cmd': 'rebuild', 'repository': repository, 'arch': arch, 'package': rebuilds}
        url = makeurl(self.apiurl, ['build', project])
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        http_request('POST', url, headers, data=urlencode(query, doseq=True))

        self.store_yaml(oldstate, project, repository, arch)
    def staging(self, project, force=False):
        """Run cycle/install/duplicate checks against a staging project.

        Returns True when all archs already have a non-pending installcheck
        report (unless force), False on hard errors or failed checks, and
        the overall result otherwise. Reports the outcome via report_state.
        """
        api = self.api

        repository = self.api.cmain_repo

        # fetch the build ids at the beginning - mirroring takes a while
        buildids = {}
        try:
            architectures = self.target_archs(project, repository)
        except HTTPError as e:
            if e.code == 404:
                # adi disappear all the time, so don't worry
                return False
            raise e

        all_done = True
        for arch in architectures:
            pra = '{}/{}/{}'.format(project, repository, arch)
            buildid = self.buildid(project, repository, arch)
            if not buildid:
                self.logger.error('No build ID in {}'.format(pra))
                return False
            buildids[arch] = buildid
            url = self.report_url(project, repository, arch, buildid)
            try:
                root = ET.parse(osc.core.http_GET(url)).getroot()
                check = root.find('check[@name="installcheck"]/state')
                if check is not None and check.text != 'pending':
                    self.logger.info('{} already "{}", ignoring'.format(
                        pra, check.text))
                else:
                    all_done = False
            except HTTPError:
                self.logger.info('{} has no status report'.format(pra))
                all_done = False

        if all_done and not force:
            return True

        repository_pairs = repository_path_expand(api.apiurl, project,
                                                  repository)
        result_comment = []

        result = True
        to_ignore = self.packages_to_ignore(project)
        status = api.project_status(project)
        if status is None:
            self.logger.error('no project status for {}'.format(project))
            return False

        for req in status.findall('staged_requests/request'):
            if req.get('type') == 'delete':
                result = result and self.check_delete_request(
                    req, to_ignore, result_comment)

        for arch in architectures:
            # hit the first repository in the target project (if existant)
            target_pair = None
            directories = []
            for pair_project, pair_repository in repository_pairs:
                # ignore repositories only inherited for config
                if repository_arch_state(self.api.apiurl, pair_project,
                                         pair_repository, arch):
                    if not target_pair and pair_project == api.project:
                        target_pair = [pair_project, pair_repository]

                    directories.append(
                        mirror(self.api.apiurl, pair_project, pair_repository,
                               arch))

            if not api.is_adi_project(project):
                # For "leaky" ring packages in letter stagings, where the
                # repository setup does not include the target project, that are
                # not intended to have all run-time dependencies satisfied.
                whitelist = self.ring_whitelist
            else:
                whitelist = set()

            # Non-mutating union: whitelist may alias self.ring_whitelist and
            # an in-place |= would permanently grow that shared set across
            # arch iterations and subsequent calls.
            whitelist = whitelist | to_ignore
            ignore_conflicts = self.ignore_conflicts | to_ignore

            check = self.cycle_check(project, repository, arch)
            if not check.success:
                self.logger.warning('Cycle check failed')
                result_comment.append(check.comment)
                result = False

            check = self.install_check(directories, arch, whitelist,
                                       ignore_conflicts)
            if not check.success:
                self.logger.warning('Install check failed')
                result_comment.append(check.comment)
                result = False

        duplicates = duplicated_binaries_in_repo(self.api.apiurl, project,
                                                 repository)
        # remove white listed duplicates
        for arch in list(duplicates):
            for binary in self.ignore_duplicated:
                duplicates[arch].pop(binary, None)
            if not len(duplicates[arch]):
                del duplicates[arch]
        if len(duplicates):
            self.logger.warning('Found duplicated binaries')
            result_comment.append(
                yaml.dump(duplicates, default_flow_style=False))
            result = False

        if result:
            self.report_state('success', self.gocd_url(), project, repository,
                              buildids)
        else:
            result_comment.insert(
                0, 'Generated from {}\n'.format(self.gocd_url()))
            self.report_state('failure',
                              self.upload_failure(project, result_comment),
                              project, repository, buildids)
            self.logger.warning('Not accepting {}'.format(project))
            return False

        return result
    def staging(self, project, force=False):
        """Run cycle/install/duplicate checks against a staging project.

        Returns True when every arch already has a non-pending installcheck
        report (unless force), False on hard errors or failed checks, and
        the overall result otherwise. Reports the outcome via report_state.
        """
        api = self.api

        repository = self.api.cmain_repo

        # fetch the build ids at the beginning - mirroring takes a while
        buildids = {}
        try:
            architectures = self.target_archs(project, repository)
        except HTTPError as e:
            if e.code == 404:
                # adi disappear all the time, so don't worry
                return False
            raise e

        all_done = True
        for arch in architectures:
            pra = '{}/{}/{}'.format(project, repository, arch)
            buildid = self.buildid(project, repository, arch)
            if not buildid:
                self.logger.error('No build ID in {}'.format(pra))
                return False
            buildids[arch] = buildid
            url = self.report_url(project, repository, arch, buildid)
            try:
                root = ET.parse(osc.core.http_GET(url)).getroot()
                check = root.find('check[@name="installcheck"]/state')
                if check is not None and check.text != 'pending':
                    self.logger.info('{} already "{}", ignoring'.format(pra, check.text))
                else:
                    all_done = False
            except HTTPError:
                self.logger.info('{} has no status report'.format(pra))
                all_done = False

        if all_done and not force:
            return True

        repository_pairs = repository_path_expand(api.apiurl, project, repository)

        result = True

        status = api.project_status(project)
        if not status:
            self.logger.error('no project status for {}'.format(project))
            return False

        result_comment = []

        to_ignore = self.packages_to_ignore(project)
        meta = api.load_prj_pseudometa(status['description'])
        for req in meta['requests']:
            if req['type'] == 'delete':
                result = result and self.check_delete_request(req, to_ignore, result_comment)

        for arch in architectures:
            # hit the first repository in the target project (if existant)
            target_pair = None
            directories = []
            for pair_project, pair_repository in repository_pairs:
                # ignore repositories only inherited for config
                if repository_arch_state(self.api.apiurl, pair_project, pair_repository, arch):
                    if not target_pair and pair_project == api.project:
                        target_pair = [pair_project, pair_repository]

                    directories.append(self.mirror(pair_project, pair_repository, arch))

            if not api.is_adi_project(project):
                # For "leaky" ring packages in letter stagings, where the
                # repository setup does not include the target project, that are
                # not intended to have all run-time dependencies satisfied.
                whitelist = self.ring_whitelist
            else:
                whitelist = self.existing_problems

            # Non-mutating union: whitelist aliases self.ring_whitelist or
            # self.existing_problems; an in-place |= would permanently grow
            # the shared set across arch iterations and subsequent calls.
            whitelist = whitelist | set(to_ignore)

            check = self.cycle_check(project, repository, arch)
            if not check.success:
                self.logger.warning('Cycle check failed')
                result_comment.append(check.comment)
                result = False

            check = self.install_check(target_pair, arch, directories, None, whitelist)
            if not check.success:
                self.logger.warning('Install check failed')
                result_comment.append(check.comment)
                result = False

        duplicates = duplicated_binaries_in_repo(self.api.apiurl, project, repository)
        # remove white listed duplicates
        for arch in list(duplicates):
            for binary in self.ignore_duplicated:
                duplicates[arch].pop(binary, None)
            if not len(duplicates[arch]):
                del duplicates[arch]
        if len(duplicates):
            self.logger.warning('Found duplicated binaries')
            result_comment.append(yaml.dump(duplicates, default_flow_style=False))
            result = False

        if result:
            self.report_state('success', self.gocd_url(), project, repository, buildids)
        else:
            result_comment.insert(0, 'Generated from {}\n'.format(self.gocd_url()))
            self.report_state('failure', self.upload_failure(project, result_comment), project, repository, buildids)
            self.logger.warning('Not accepting {}'.format(project))
            return False

        return result
    def staging(self, project, force=False):
        """Run cycle/install checks against a staging project and report state.

        Collects the current build ids, mirrors every repository in the
        expanded path for each build architecture, runs the cycle check and
        the install check, and posts an overall 'success' or 'failure' state
        via report_state().

        :param project: staging project name.
        :param force: when True, re-run the checks even if every architecture
            already has a non-pending installcheck report.
        :return: True when all checks pass (or everything was already
            reported and force is False), False otherwise.
        """
        api = self.api

        repository = self.api.cmain_repo

        # fetch the build ids at the beginning - mirroring takes a while
        buildids = {}
        try:
            architectures = self.target_archs(project, repository)
        except HTTPError as e:
            if e.code == 404:
                # adi disappear all the time, so don't worry
                return False
            raise e

        all_done = True
        for arch in architectures:
            pra = '{}/{}/{}'.format(project, repository, arch)
            buildid = self.buildid(project, repository, arch)
            if not buildid:
                self.logger.error('No build ID in {}'.format(pra))
                return False
            buildids[arch] = buildid
            url = self.report_url(project, repository, arch, buildid)
            try:
                root = ET.parse(osc.core.http_GET(url)).getroot()
                check = root.find('check[@name="installcheck"]/state')
                if check is not None and check.text != 'pending':
                    self.logger.info('{} already "{}", ignoring'.format(pra, check.text))
                else:
                    all_done = False
            except HTTPError:
                self.logger.info('{} has no status report'.format(pra))
                all_done = False

        # Nothing left to check for this build id unless explicitly forced.
        if all_done and not force:
            return True

        repository_pairs = repository_path_expand(api.apiurl, project, repository)
        staging_pair = [project, repository]

        result = True

        status = api.project_status(project)
        if not status:
            self.logger.error('no project status for {}'.format(project))
            return False

        result_comment = []

        to_ignore = self.packages_to_ignore(project)
        meta = api.load_prj_pseudometa(status['description'])
        for req in meta['requests']:
            if req['type'] == 'delete':
                result = result and self.check_delete_request(req, to_ignore, result_comment)

        for arch in architectures:
            # hit the first repository in the target project (if existant)
            target_pair = None
            directories = []
            for pair_project, pair_repository in repository_pairs:
                # ignore repositories only inherited for config
                if repository_arch_state(self.api.apiurl, pair_project, pair_repository, arch):
                    if not target_pair and pair_project == api.project:
                        target_pair = [pair_project, pair_repository]

                    directories.append(self.mirror(pair_project, pair_repository, arch))

            if not api.is_adi_project(project):
                # For "leaky" ring packages in letter stagings, where the
                # repository setup does not include the target project, that are
                # not intended to to have all run-time dependencies satisfied.
                whitelist = self.ring_whitelist
            else:
                whitelist = self.existing_problems

            whitelist |= set(to_ignore)

            check = self.cycle_check(project, repository, arch)
            if not check.success:
                # logger.warn is deprecated; use warning (matches the rest of
                # this file).
                self.logger.warning('Cycle check failed')
                result_comment.append(check.comment)
                result = False

            check = self.install_check(target_pair, arch, directories, None, whitelist)
            if not check.success:
                self.logger.warning('Install check failed')
                result_comment.append(check.comment)
                result = False

        if result:
            self.report_state('success', self.gocd_url(), project, repository, buildids)
        else:
            result_comment.insert(0, 'Generated from {}\n'.format(self.gocd_url()))
            self.report_state('failure', self.upload_failure(project, result_comment), project, repository, buildids)
            self.logger.warning('Not accepting {}'.format(project))
            return False

        return result
Esempio n. 17
0
 def expand_repos(self, project, repo='standard'):
     """Return the expanded repository path chain for *repo* in *project*.

     Thin wrapper around repository_path_expand() using this instance's
     apiurl.
     """
     expanded = repository_path_expand(self.apiurl, project, repo)
     return expanded