Example #1
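Runs a single Britney migration between suites: it checks that a Britney configuration exists, prepares all the data the migration tester needs, executes Britney, has dak import the resulting Heidi file into the target suite, and finally returns the migration excuses.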
    def _run_migration_internal(self, session, suites_from: List[ArchiveSuite],
                                suite_to: ArchiveSuite):

        mi_wspace = self._get_migrate_workspace(suites_from, suite_to)
        britney_conf = os.path.join(mi_wspace, 'britney.conf')
        if not os.path.isfile(britney_conf):
            log.warning(
                'No Britney config for migration run "{}" - maybe the configuration was not yet updated?'
                .format(self._get_migration_name(suites_from, suite_to)))
            return None

        log.info('Migration run for "{}"'.format(
            self._get_migration_name(suites_from, suite_to)))
        # ensure prerequisites are met and Britney is fed with all the data it needs
        self._prepare_source_data(session, mi_wspace, suites_from, suite_to)
        self._create_faux_packages(session, mi_wspace, suites_from, suite_to)
        self._collect_urgencies(mi_wspace)
        self._setup_dates(mi_wspace)
        self._setup_various(mi_wspace, suites_from, suite_to)

        # execute the migration tester
        self._britney.run(mi_wspace, britney_conf)

        # tell dak to import the new data (overriding the target suite)
        dak = DakBridge()
        heidi_result = self._postprocess_heidi_file(mi_wspace)
        ret = dak.set_suite_to_britney_result(suite_to.name, heidi_result)
        if not ret:
            return None

        res = self._retrieve_excuses(session, mi_wspace, suites_from, suite_to)
        return res
Example #2
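Initializes an event publisher for a module: it creates a ZeroMQ submit socket and loads the module's Curve signing key, disabling the socket if no valid signing key is available.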
    def __init__(self, module):
        self._module = str(module)
        lconf = LocalConfig()
        keyfile = lconf.secret_curve_keyfile_for_module(self._module)

        self._zctx = zmq.Context()
        self._socket = create_submit_socket(self._zctx)

        signer_id = None
        signing_key = None
        if os.path.isfile(keyfile):
            signer_id, signing_key = keyfile_read_signing_key(keyfile)

        if self._socket and not signing_key:
            log.warning(
                'Can not publish events: No valid signing key found for this module.'
            )
            self._socket = None
        self._signer_id = signer_id
        self._signing_key = signing_key
Example #3
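Imports a Britney result (HeidiResult file) into the dak database via `dak control-suite`, guarding against missing or empty input files, since an empty file could wipe the target suite's contents.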
    def set_suite_to_britney_result(self, suite_name: str,
                                    heidi_file: str) -> bool:
        '''
        Import a Britney result (HeidiResult file) into the dak database.
        This will *override* all existing package information in the target suite.
        Use this command with great care!
        '''

        # do some sanity checks
        if not os.path.isfile(heidi_file):
            log.warning(
                'Britney result not imported: File "{}" does not exist.'.
                format(heidi_file))
            return False

        # an empty file might cause us to delete the whole repository contents.
        # this is a safeguard against that.
        heidi_data = None
        with open(heidi_file, 'r') as f:
            heidi_data = f.read().strip()
        if not heidi_data:
            log.warning(
                'Stopped Britney result import: File "{}" is empty.'.format(
                    heidi_file))
            return True

        log.info('Importing britney result from {}'.format(heidi_file))

        # run dak control-suite command.
        args = ['control-suite']
        args.extend(['--set', suite_name, '--britney'])
        ret, out = self._run_dak(args, input_data=heidi_data, check=False)

        if ret != 0:
            raise Exception('Unable to apply Britney result to "{}": {}'.format(
                suite_name, out))

        log.info('Updated packages in "{}" based on Britney result.'.format(
            suite_name))
        return True
Example #4
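Post-processes Britney's raw HeidiResult file into the `heidi/current` file used by the import step, validating each line and stripping the trailing column.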
    def _postprocess_heidi_file(self, mi_wspace: str):
        heidi_result = os.path.join(mi_wspace, 'output', 'target',
                                    'HeidiResult')
        processed_result = os.path.join(mi_wspace, 'output', 'target', 'heidi',
                                        'current')

        final_data = []
        with open(heidi_result, 'r') as f:
            for line in f:
                parts = line.strip().split(' ')
                if len(parts) != 4:
                    log.warning(
                        'Found invalid line in Britney result: {}'.format(
                            line.strip()))
                    continue
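                # keep only the first three columns (package, version, architecture) and drop the fourth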
                final_data.append('{} {} {}'.format(parts[0], parts[1],
                                                    parts[2]))

        os.makedirs(os.path.dirname(processed_result), exist_ok=True)
        with open(processed_result, 'w') as f:
            f.write('\n'.join(final_data))
            f.write('\n')

        return processed_result
Example #5
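Synchronizes a set of source packages from the source suite into the target suite, skipping packages that are missing, blacklisted, already newer in the target, or locally modified (unless `force` is set), and then imports the matching binary packages.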
    def sync_packages(self, component: str, pkgnames: List[str], force: bool = False):
        self._synced_source_pkgs = []

        with session_scope() as session:
            sync_conf = session.query(SynchrotronConfig) \
                               .join(SynchrotronConfig.destination_suite) \
                               .join(SynchrotronConfig.source) \
                               .filter(ArchiveSuite.name == self._target_suite_name,
                                       SynchrotronSource.suite_name == self._source_suite_name).one_or_none()
            if not sync_conf:
                log.error('Unable to find a sync config for this source/destination combination.')
                return False

            if not sync_conf.sync_enabled:
                log.error('Can not synchronize package: Synchronization is disabled for this configuration.')
                return False

            target_suite = session.query(ArchiveSuite) \
                                  .filter(ArchiveSuite.name == self._target_suite_name).one()

            dest_pkg_map = self._get_target_source_packages(component)
            src_pkg_map = self._get_repo_source_package_map(self._source_repo,
                                                            self._source_suite_name,
                                                            component)

            for pkgname in pkgnames:
                spkg = src_pkg_map.get(pkgname)
                dpkg = dest_pkg_map.get(pkgname)

                if not spkg:
                    log.info('Can not sync {}: Does not exist in source.'.format(pkgname))
                    continue
                if pkgname in self._sync_blacklist:
                    log.info('Can not sync {}: The package is blacklisted.'.format(pkgname))
                    continue

                if dpkg:
                    if version_compare(dpkg.version, spkg.version) >= 0:
                        if force:
                            log.warning('{}: Target version \'{}\' is newer than or equal to source version \'{}\'.'
                                        .format(pkgname, dpkg.version, spkg.version))
                        else:
                            log.info('Can not sync {}: Target version \'{}\' is newer than or equal to source version \'{}\'.'
                                     .format(pkgname, dpkg.version, spkg.version))
                            continue

                    if not force:
                        if self._distro_tag in version_revision(dpkg.version):
                            log.error('Not syncing {}/{}: Destination has modifications (found {}).'
                                      .format(spkg.name, spkg.version, dpkg.version))
                            continue

                # sync source package
                # the source package must always be known to dak first
                ret = self._import_source_package(spkg, component)
                if not ret:
                    return False

            ret = self._import_binaries_for_source(sync_conf, target_suite, component, self._synced_source_pkgs, force)

            # TODO: Analyze the input, fetch the packages from the source distribution and
            # import them into the target in their correct order.
            # Then apply the correct, synced override from the source distro.

            self._publish_synced_spkg_events(sync_conf.source.os_name,
                                             sync_conf.source.suite_name,
                                             sync_conf.destination_suite.name,
                                             force)
            return ret
Example #6
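Imports the binary packages belonging to a set of synced source packages, walking every target architecture and skipping binaries that are outdated, rebuild uploads (binNMUs), or locally modified in the target.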
    def _import_binaries_for_source(self, sync_conf, target_suite, component: str, spkgs: List[SourcePackage],
                                    ignore_target_changes: bool = False) -> bool:
        ''' Import binary packages for the given set of source packages into the archive. '''

        if not sync_conf.sync_binaries:
            log.debug('Skipping binary syncs.')
            return True

        # list of valid architectures supported by the target
        target_archs = [a.name for a in target_suite.architectures]

        # cache of binary-package mappings for the source
        src_bpkg_arch_map = {}
        for aname in target_archs:
            src_bpkg_arch_map[aname] = self._get_repo_binary_package_map(self._source_repo, self._source_suite_name, component, aname)

        # cache of binary-package mappings from the target repository
        dest_bpkg_arch_map = {}
        for aname in target_archs:
            dest_bpkg_arch_map[aname] = self._get_repo_binary_package_map(self._target_repo, self._target_suite_name, component, aname)

        for spkg in spkgs:
            bin_files_synced = False
            existing_packages = False
            for arch_name in target_archs:
                if arch_name not in src_bpkg_arch_map:
                    continue

                src_bpkg_map = src_bpkg_arch_map[arch_name]
                dest_bpkg_map = dest_bpkg_arch_map[arch_name]

                bin_files = []
                for bin_i in spkg.binaries:
                    if bin_i.name not in src_bpkg_map:
                        if bin_i.name in dest_bpkg_map:
                            existing_packages = True  # package only exists in target
                        continue
                    if arch_name != 'all' and bin_i.architectures == ['all']:
                        # we handle arch:all explicitly
                        continue
                    bpkg = src_bpkg_map[bin_i.name]
                    if bin_i.version != bpkg.source_version:
                        log.debug('Not syncing binary package \'{}\': Version number \'{}\' does not match source package version \'{}\'.'
                                  .format(bpkg.name, bin_i.version, bpkg.source_version))
                        continue

                    ebpkg = dest_bpkg_map.get(bpkg.name)
                    if ebpkg:
                        if version_compare(ebpkg.version, bpkg.version) >= 0:
                            log.debug('Not syncing binary package \'{}/{}\': Existing binary package with higher or equal version \'{}\' found.'
                                      .format(bpkg.name, bpkg.version, ebpkg.version))
                            existing_packages = True
                            continue

                        # Filter out manual rebuild uploads matching the pattern XbY.
                        # Sometimes rebuild uploads of unmodified packages happen, and if the source
                        # distro did a binNMU, we don't want to sync that, even if its version is higher.
                        # This rebuild-upload check must only happen if we haven't just updated the source
                        # package (in that case the source package version will be higher than the existing
                        # binary package version).
                        if version_compare(spkg.version, ebpkg.version) >= 0:
                            if re.match(r'(.*)b([0-9]+)', ebpkg.version):
                                log.debug('Not syncing binary package \'{}/{}\': Existing binary package with rebuild upload \'{}\' found.'
                                          .format(bpkg.name, bpkg.version, ebpkg.version))
                                existing_packages = True
                                continue

                        if not ignore_target_changes and self._distro_tag in version_revision(ebpkg.version):
                            # safety measure, we should never get here as packages with modifications were
                            # filtered out previously.
                            log.debug('Can not sync binary package {}/{}: Target has modifications.'.format(bin_i.name, bin_i.version))
                            continue

                    fname = self._source_repo.get_file(bpkg.bin_file)
                    bin_files.append(fname)

                # now import the binary packages, if there is anything to import
                if bin_files:
                    bin_files_synced = True
                    ret = self._import_package_files(self._target_suite_name, component, bin_files)
                    if not ret:
                        return False

            if not bin_files_synced and not existing_packages:
                log.warning('No binary packages synced for source {}/{}'.format(spkg.name, spkg.version))

        return True
Example #7
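Reads binary package entries from a Packages TagFile and converts them into `BinaryPackage` objects, deriving the source name/version and collecting dependency and file metadata.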
    def _read_binary_packages_from_tf(self, tf, tf_fname, suite, component, arch, deb_type):
        requested_arch_is_all = arch.name == 'all'

        pkgs = []
        for e in tf:
            pkgname = e['Package']
            pkgversion = e['Version']
            if not pkgname or not pkgversion:
                raise Exception('Found invalid block (no Package and Version fields) in Packages file "{}".'.format(tf_fname))

            arch_name = e['Architecture']

            # we deal with arch:all packages separately
            if not requested_arch_is_all and arch_name == 'all':
                continue

            # sanity check
            if arch_name != arch.name:
                log.warning('Found package "{}::{}/{}" with unexpected architecture "{}" (expected "{}")'.format(self._name, pkgname, pkgversion, arch_name, arch.name))

            pkg = BinaryPackage()
            pkg.repo = self._repo_entity
            pkg.name = pkgname
            pkg.component = component
            pkg.version = pkgversion
            if suite not in pkg.suites:
                pkg.suites.append(suite)

            pkg.architecture = arch
            pkg.maintainer = e['Maintainer']

            source_id = e.get('Source')
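            # the 'Source' field may be absent (source name equals the package name),
            # a plain name, or 'name (version)' when the binary and source versions differ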
            if not source_id:
                pkg.source_name = pkg.name
                pkg.source_version = pkg.version
            elif '(' in source_id:
                pkg.source_name = source_id[:source_id.index('(')].strip()
                pkg.source_version = source_id[source_id.index('(') + 1:source_id.index(')')].strip()
            else:
                pkg.source_name = source_id
                pkg.source_version = pkg.version

            pkg.size_installed = int(e.get('Installed-Size', '0'))

            pkg.depends = split_strip(e.get('Depends', ''), ',')
            pkg.pre_depends = split_strip(e.get('Pre-Depends', ''), ',')

            pkg.homepage = e.get('Homepage')
            pkg.section = e['Section']

            pkg.description = e['Description']
            pkg.description_md5 = e.get('Description-md5')

            pkg.priority = packagepriority_from_string(e['Priority'])

            pkg.bin_file = ArchiveFile()
            pkg.bin_file.fname = e['Filename']
            pkg.bin_file.size = int(e.get('Size', '0'))
            pkg.bin_file.sha256sum = e['SHA256']

            pkg.deb_type = DebType.DEB
            if pkg.bin_file.fname.endswith('.udeb'):
                pkg.deb_type = DebType.UDEB

            # do some issue-reporting
            if not pkg.bin_file.fname:
                log.warning('Binary package "{}/{}/{}" seems to have no files.'.format(pkg.name, pkg.version, arch.name))

            # update UUID and add package to results set
            pkg.update_uuid()
            pkgs.append(pkg)

        return pkgs
Example #8
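Parses a `Sources.xz` index into `SourcePackage` objects, preferring the `Package-List` field over the plain `Binary` field to determine the built binary packages.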
    def source_packages(self, suite, component):
        ''' Return a list of all source packages in the given suite and component. '''
        assert type(suite) is ArchiveSuite
        assert type(component) is ArchiveComponent

        index_fname = self.index_file(suite.name, os.path.join(component.name, 'source', 'Sources.xz'))
        if not index_fname:
            return []

        pkgs = []
        with TagFile(index_fname) as tf:
            for e in tf:
                pkgname = e['Package']
                pkgversion = e['Version']
                if not pkgname or not pkgversion:
                    raise Exception('Found invalid block (no Package and Version fields) in Sources file "{}".'.format(index_fname))

                pkg = SourcePackage()
                pkg.repo = self._repo_entity
                pkg.name = pkgname
                pkg.component = component
                if suite not in pkg.suites:
                    pkg.suites.append(suite)

                pkg.version = pkgversion
                pkg.architectures = split_strip(e['Architecture'], ' ')
                pkg.standards_version = e.get('Standards-Version', '0~notset')
                pkg.format_version = e['Format']

                pkg.vcs_browser = e.get('Vcs-Browser')
                pkg.homepage = e.get('Homepage')
                pkg.maintainer = e['Maintainer']
                pkg.uploaders = split_strip(e.get('Uploaders', ''), ',')  # FIXME: Careful! Splitting just by comma isn't enough! We need to parse this properly.

                pkg.build_depends = split_strip(e.get('Build-Depends', ''), ',')
                pkg.directory = e['Directory']

                pkg.files = parse_checksums_list(e.get('Checksums-Sha256'), pkg.directory)

                binaries = []
                raw_pkg_list = e.get('Package-List', None)
                if not raw_pkg_list:
                    for bpname in e.get('Binary', '').split(','):
                        if not bpname:
                            continue
                        bpname = bpname.strip()
                        pi = PackageInfo()
                        pi.deb_type = DebType.DEB
                        pi.name = bpname
                        pi.ver = pkg.version
                        binaries.append(pi)
                else:
                    binaries = parse_package_list_str(raw_pkg_list, pkg.version)
                pkg.binaries = binaries

                # do some issue-reporting
                if not pkg.files and pkg.format_version != '1.0':
                    log.warning('Source package {}/{} seems to have no files (in {}).'.format(pkg.name, pkg.version, self.location))

                # add package to results set
                pkg.update_uuid()
                pkgs.append(pkg)

        return pkgs
Example #9
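Imports DEP-11 AppStream metadata for a suite/component/architecture, matches each software component to the binary package it belongs to, and stores new `SoftwareComponent` records in the database.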
def update_appstream_data(session, local_repo, repo, suite, component, arch):
    '''
    Import AppStream metadata about software components and associate it with the
    binary packages the data belongs to.
    '''

    if arch.name == 'all':
        # arch:all has no AppStream components, those are always associated with an architecture
        # and are included in arch-specific files (even if the package they belong to is arch:all)
        return

    arch_all = session.query(ArchiveArchitecture) \
                      .filter(ArchiveArchitecture.name == 'all').one()

    yaml_fname = local_repo.index_file(
        suite,
        os.path.join(component.name, 'dep11',
                     'Components-{}.yml.xz'.format(arch.name)))
    if not yaml_fname:
        return

    cidmap_fname = local_repo.index_file(
        suite,
        os.path.join(component.name, 'dep11',
                     'CID-Index-{}.json.gz'.format(arch.name)),
        check=False)
    if not cidmap_fname:
        return

    with gzip.open(cidmap_fname, 'rb') as f:
        cid_map = json.loads(f.read())
    with lzma.open(yaml_fname, 'r') as f:
        yaml_data = str(f.read(), 'utf-8')

    mdata = AppStream.Metadata()
    mdata.set_locale('ALL')
    mdata.set_format_style(AppStream.FormatStyle.COLLECTION)
    mdata.set_parse_flags(AppStream.ParseFlags.IGNORE_MEDIABASEURL)

    mdata.parse(yaml_data, AppStream.FormatKind.YAML)
    cpts = mdata.get_components()
    if len(cpts) == 0:
        return

    log.debug('Found {} software components in {}/{}'.format(
        len(cpts), suite.name, component.name))

    # create context for direct serialization to collection XML chunk
    context = AppStream.Context()
    context.set_locale('ALL')
    context.set_style(AppStream.FormatStyle.COLLECTION)

    for cpt in cpts:
        cpt.set_active_locale('C')

        pkgname = cpt.get_pkgname()
        if not pkgname:
            # we skip these for now, web-apps have no package assigned - we might need a better way to map
            # those to their packages, likely with an improved appstream-generator integration
            log.debug(
                'Found DEP-11 component without package name in {}/{}: {}'.
                format(suite.name, component.name, cpt.get_id()))
            continue

        # fetch package this component belongs to
        bin_pkg = session.query(BinaryPackage) \
            .filter(BinaryPackage.name == pkgname) \
            .filter(BinaryPackage.repo_id == repo.id) \
            .filter(BinaryPackage.architecture_id.in_((arch.id, arch_all.id))) \
            .filter(BinaryPackage.component_id == component.id) \
            .filter(BinaryPackage.suites.any(ArchiveSuite.id == suite.id)) \
            .order_by(BinaryPackage.version.desc()).first()

        if not bin_pkg:
            log.info('Found orphaned DEP-11 component in {}/{}: {}'.format(
                suite.name, component.name, cpt.get_id()))
            continue

        dcpt = SoftwareComponent()
        dcpt.kind = int(cpt.get_kind())
        dcpt.cid = cpt.get_id()

        dcpt.xml = cpt.to_xml_data(context)

        dcpt.gcid = cid_map.get(dcpt.cid)
        if not dcpt.gcid:
            log.warning(
                'Found DEP-11 component without GCID in {}/{}: {}'.format(
                    suite.name, component.name, cpt.get_id()))
            continue

        # create UUID for this component (based on GCID or XML data)
        dcpt.update_uuid()

        existing_dcpt = session.query(SoftwareComponent) \
            .filter(SoftwareComponent.uuid == dcpt.uuid).one_or_none()
        if existing_dcpt:
            if bin_pkg in existing_dcpt.bin_packages:
                continue  # the binary package is already registered with this component
            existing_dcpt.bin_packages.append(bin_pkg)
            continue  # we already have this component, no need to add it again

        # add new software component to database
        dcpt.name = cpt.get_name()
        dcpt.summary = cpt.get_summary()
        dcpt.description = cpt.get_description()

        for icon in cpt.get_icons():
            if icon.get_kind() == AppStream.IconKind.CACHED:
                dcpt.icon_name = icon.get_name()
                break

        dcpt.project_license = cpt.get_project_license()
        dcpt.developer_name = cpt.get_developer_name()

        # test for free software
        dcpt.is_free = False
        if not dcpt.project_license:
            # We have no license set.
            # If we are in the 'main' component, we
            # assume we have free software
            if bin_pkg.component.name == 'main':
                dcpt.is_free = True
        else:
            # have AppStream test the SPDX license expression for free software
            dcpt.is_free = AppStream.license_is_free_license(
                dcpt.project_license)

        dcpt.categories = []
        for cat in cpt.get_categories():
            dcpt.categories.append(cat)

        dcpt.bin_packages = [bin_pkg]

        session.add(dcpt)
        log.debug('Added new software component \'{}\' to database'.format(
            dcpt.cid))
    session.commit()